Dec 05 14:58:40 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 05 14:58:40 crc restorecon[4671]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
system_u:object_r:container_file_t:s0:c0,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 14:58:40 crc restorecon[4671]:
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 14:58:40 crc restorecon[4671]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:40 crc restorecon[4671]:
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 05 14:58:40 crc
restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Dec 05 14:58:40 crc restorecon[4671]:
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Dec 05 14:58:40 crc restorecon[4671]:
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Dec 05 14:58:40 crc
restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to
system_u:object_r:container_file_t:s0:c129,c158
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c97,c980
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]:
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]:
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]:
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]:
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to
system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]:
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to
system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:40 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]:
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc 
restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc 
restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 14:58:41 
crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 
14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 14:58:41 crc restorecon[4671]: 
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]:
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 
14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc 
restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 14:58:41 crc restorecon[4671]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 14:58:41 crc restorecon[4671]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 14:58:41 crc restorecon[4671]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Dec 05 14:58:41 crc kubenswrapper[4840]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 05 14:58:41 crc kubenswrapper[4840]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Dec 05 14:58:41 crc kubenswrapper[4840]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 05 14:58:41 crc kubenswrapper[4840]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 05 14:58:41 crc kubenswrapper[4840]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Dec 05 14:58:41 crc kubenswrapper[4840]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.923907 4840 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927763 4840 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927786 4840 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927792 4840 feature_gate.go:330] unrecognized feature gate: Example
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927797 4840 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927816 4840 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927822 4840 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927827 4840 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927833 4840 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927838 4840 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927843 4840 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927848 4840 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927852 4840 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927857 4840 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927862 4840 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927884 4840 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927889 4840 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927893 4840 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927898 4840 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927903 4840 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927907 4840 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927912 4840 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927917 4840 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927922 4840 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927928 4840 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927934 4840 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927940 4840 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927945 4840 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927949 4840 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927954 4840 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927960 4840 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927965 4840 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927972 4840 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927977 4840 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927982 4840 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927986 4840 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927991 4840 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.927995 4840 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928000 4840 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928004 4840 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928009 4840 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928013 4840 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928018 4840 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928023 4840 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928028 4840 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928034 4840 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928038 4840 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928043 4840 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928048 4840 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928053 4840 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928060 4840 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928065 4840 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928070 4840 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928074 4840 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928079 4840 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928084 4840 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928090 4840 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928096 4840 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928102 4840 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928107 4840 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928112 4840 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928117 4840 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928122 4840 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928126 4840 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928132 4840 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928136 4840 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928144 4840 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928149 4840 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928160 4840 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928165 4840 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928170 4840 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.928177 4840 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928599 4840 flags.go:64] FLAG: --address="0.0.0.0" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928615 4840 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928627 4840 flags.go:64] FLAG: --anonymous-auth="true" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928634 4840 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928645 4840 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928651 4840 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928659 4840 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928666 4840 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928672 4840 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928678 4840 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928685 4840 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928690 4840 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928696 4840 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928702 4840 flags.go:64] FLAG: --cgroup-root="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928708 4840 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928714 4840 flags.go:64] FLAG: --client-ca-file="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928719 4840 flags.go:64] FLAG: --cloud-config="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928725 4840 flags.go:64] FLAG: --cloud-provider="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928730 4840 flags.go:64] FLAG: --cluster-dns="[]" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928737 4840 flags.go:64] FLAG: --cluster-domain="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928742 4840 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928748 4840 flags.go:64] FLAG: --config-dir="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928753 4840 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928759 4840 flags.go:64] FLAG: --container-log-max-files="5" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928767 4840 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928773 4840 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928778 4840 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928784 4840 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928790 4840 flags.go:64] FLAG: --contention-profiling="false" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 
14:58:41.928795 4840 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928801 4840 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928807 4840 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928814 4840 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928821 4840 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928826 4840 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928832 4840 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928838 4840 flags.go:64] FLAG: --enable-load-reader="false" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928844 4840 flags.go:64] FLAG: --enable-server="true" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928849 4840 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928857 4840 flags.go:64] FLAG: --event-burst="100" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928885 4840 flags.go:64] FLAG: --event-qps="50" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928891 4840 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928897 4840 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928903 4840 flags.go:64] FLAG: --eviction-hard="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928911 4840 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928917 4840 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928922 4840 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928929 4840 flags.go:64] FLAG: --eviction-soft="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928934 4840 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928940 4840 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928945 4840 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928951 4840 flags.go:64] FLAG: --experimental-mounter-path="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928957 4840 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928962 4840 flags.go:64] FLAG: --fail-swap-on="true" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928968 4840 flags.go:64] FLAG: --feature-gates="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928975 4840 flags.go:64] FLAG: --file-check-frequency="20s" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928981 4840 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928986 4840 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.928992 4840 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 
14:58:41.928999 4840 flags.go:64] FLAG: --healthz-port="10248" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929005 4840 flags.go:64] FLAG: --help="false" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929011 4840 flags.go:64] FLAG: --hostname-override="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929016 4840 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929022 4840 flags.go:64] FLAG: --http-check-frequency="20s" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929028 4840 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929033 4840 flags.go:64] FLAG: --image-credential-provider-config="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929038 4840 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929044 4840 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929057 4840 flags.go:64] FLAG: --image-service-endpoint="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929062 4840 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929068 4840 flags.go:64] FLAG: --kube-api-burst="100" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929073 4840 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929079 4840 flags.go:64] FLAG: --kube-api-qps="50" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929084 4840 flags.go:64] FLAG: --kube-reserved="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929090 4840 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929095 4840 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929101 4840 flags.go:64] FLAG: --kubelet-cgroups="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929107 4840 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929112 4840 flags.go:64] FLAG: --lock-file="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929117 4840 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929123 4840 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929129 4840 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929138 4840 flags.go:64] FLAG: --log-json-split-stream="false" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929144 4840 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929149 4840 flags.go:64] FLAG: --log-text-split-stream="false" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929154 4840 flags.go:64] FLAG: --logging-format="text" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929160 4840 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929168 4840 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929174 4840 flags.go:64] FLAG: --manifest-url="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929179 4840 
flags.go:64] FLAG: --manifest-url-header="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929187 4840 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929193 4840 flags.go:64] FLAG: --max-open-files="1000000" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929200 4840 flags.go:64] FLAG: --max-pods="110" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929206 4840 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929211 4840 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929217 4840 flags.go:64] FLAG: --memory-manager-policy="None" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929223 4840 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929228 4840 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929234 4840 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929240 4840 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929259 4840 flags.go:64] FLAG: --node-status-max-images="50" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929265 4840 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929270 4840 flags.go:64] FLAG: --oom-score-adj="-999" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929276 4840 flags.go:64] FLAG: --pod-cidr="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929283 4840 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929292 4840 flags.go:64] FLAG: --pod-manifest-path="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929297 4840 flags.go:64] FLAG: --pod-max-pids="-1" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929303 4840 flags.go:64] FLAG: --pods-per-core="0" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929309 4840 flags.go:64] FLAG: --port="10250" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929315 4840 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929320 4840 flags.go:64] FLAG: --provider-id="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929326 4840 flags.go:64] FLAG: --qos-reserved="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929331 4840 flags.go:64] FLAG: --read-only-port="10255" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929337 4840 flags.go:64] FLAG: --register-node="true" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929343 4840 flags.go:64] FLAG: --register-schedulable="true" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929348 4840 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929359 4840 flags.go:64] FLAG: --registry-burst="10" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929365 4840 flags.go:64] FLAG: --registry-qps="5" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929371 4840 flags.go:64] 
FLAG: --reserved-cpus="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929376 4840 flags.go:64] FLAG: --reserved-memory="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929383 4840 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929388 4840 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929394 4840 flags.go:64] FLAG: --rotate-certificates="false" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929400 4840 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929406 4840 flags.go:64] FLAG: --runonce="false" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929411 4840 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929417 4840 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929423 4840 flags.go:64] FLAG: --seccomp-default="false" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929428 4840 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929434 4840 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929440 4840 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929445 4840 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929455 4840 flags.go:64] FLAG: --storage-driver-password="root" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929460 4840 flags.go:64] FLAG: --storage-driver-secure="false" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929466 4840 flags.go:64] FLAG: --storage-driver-table="stats" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929471 4840 flags.go:64] FLAG: --storage-driver-user="root" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929477 4840 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929482 4840 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929488 4840 flags.go:64] FLAG: --system-cgroups="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929493 4840 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929504 4840 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929509 4840 flags.go:64] FLAG: --tls-cert-file="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929514 4840 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929521 4840 flags.go:64] FLAG: --tls-min-version="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929527 4840 flags.go:64] FLAG: --tls-private-key-file="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929532 4840 flags.go:64] FLAG: --topology-manager-policy="none" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929537 4840 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929542 4840 flags.go:64] FLAG: --topology-manager-scope="container" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929548 4840 flags.go:64] 
FLAG: --v="2" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929563 4840 flags.go:64] FLAG: --version="false" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929571 4840 flags.go:64] FLAG: --vmodule="" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929578 4840 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.929584 4840 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929733 4840 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929742 4840 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929749 4840 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929755 4840 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929761 4840 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929766 4840 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929771 4840 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929776 4840 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929781 4840 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929786 4840 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929793 4840 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929802 4840 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929809 4840 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929814 4840 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929820 4840 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929825 4840 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929830 4840 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929835 4840 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929840 4840 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929845 4840 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929850 4840 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929854 4840 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929859 4840 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929886 4840 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929892 4840 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929899 4840 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929905 4840 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929910 4840 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929918 4840 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929923 4840 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929929 4840 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929935 4840 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929941 4840 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929947 4840 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929952 4840 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929957 4840 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929961 4840 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929966 4840 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929970 4840 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929975 4840 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929979 4840 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929984 4840 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929989 4840 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.929996 4840 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930001 4840 feature_gate.go:330] unrecognized feature gate: Example Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930005 4840 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930009 4840 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930014 4840 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930018 4840 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930023 4840 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930027 4840 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930032 4840 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930036 4840 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930041 4840 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930045 4840 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930050 4840 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930054 4840 feature_gate.go:330] unrecognized 
feature gate: VSphereDriverConfiguration Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930059 4840 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930063 4840 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930069 4840 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930076 4840 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930081 4840 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930085 4840 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930090 4840 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930094 4840 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930100 4840 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930104 4840 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930109 4840 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930114 4840 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930118 4840 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.930123 4840 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.930131 4840 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.939393 4840 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.939433 4840 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939540 4840 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939549 4840 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939555 4840 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939560 4840 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939565 4840 feature_gate.go:330] unrecognized feature gate: Example Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939569 4840 feature_gate.go:330] unrecognized 
feature gate: InsightsConfigAPI Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939575 4840 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939581 4840 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939585 4840 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939588 4840 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939592 4840 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939596 4840 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939601 4840 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939605 4840 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939609 4840 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939613 4840 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939616 4840 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939621 4840 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939624 4840 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939628 4840 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939632 4840 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939636 4840 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939640 4840 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939644 4840 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939648 4840 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939651 4840 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939655 4840 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939660 4840 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939673 4840 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939677 4840 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939681 4840 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939685 4840 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939689 4840 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939694 4840 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939709 4840 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939715 4840 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939720 4840 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939725 4840 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939729 4840 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939733 4840 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939737 4840 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939741 4840 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939745 4840 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939749 4840 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939752 4840 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939756 4840 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939760 4840 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939763 4840 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939767 4840 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939771 4840 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939775 4840 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939779 4840 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939782 4840 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 
14:58:41.939786 4840 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939790 4840 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939793 4840 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939797 4840 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939802 4840 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939806 4840 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939810 4840 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939816 4840 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939822 4840 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939827 4840 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939831 4840 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939836 4840 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939841 4840 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939846 4840 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939852 4840 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939856 4840 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939884 4840 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.939899 4840 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.939907 4840 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940108 4840 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940116 4840 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940121 4840 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940127 4840 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940131 4840 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940136 4840 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940140 4840 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940144 4840 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940149 4840 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940154 4840 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940157 4840 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940161 4840 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940165 4840 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940169 4840 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940173 4840 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940177 4840 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940180 4840 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940184 4840 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940188 4840 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940192 4840 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940196 4840 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940200 4840 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 
14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940203 4840 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940207 4840 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940212 4840 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940216 4840 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940219 4840 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940223 4840 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940229 4840 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940233 4840 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940236 4840 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940240 4840 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940244 4840 feature_gate.go:330] unrecognized feature gate: Example Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940248 4840 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940261 4840 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940265 4840 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940269 4840 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940273 4840 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940277 4840 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940280 4840 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940285 4840 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940288 4840 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940293 4840 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940298 4840 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940303 4840 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940309 4840 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940313 4840 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940318 4840 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940324 4840 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940329 4840 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940334 4840 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940341 4840 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940346 4840 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940350 4840 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940354 4840 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940359 4840 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940364 4840 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940368 4840 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940373 4840 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940379 4840 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940385 4840 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940390 4840 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940394 4840 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940398 4840 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940401 4840 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940405 4840 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940409 4840 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940413 4840 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940417 4840 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940420 4840 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 14:58:41 crc kubenswrapper[4840]: W1205 14:58:41.940432 4840 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.940439 4840 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.940828 4840 server.go:940] "Client rotation is on, will bootstrap in background" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.943452 4840 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.943523 4840 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.944011 4840 server.go:997] "Starting client certificate rotation"
Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.944033 4840 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.944493 4840 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-03 15:05:01.712171643 +0000 UTC
Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.944590 4840 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 696h6m19.767583218s for next certificate rotation
Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.949885 4840 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.953775 4840 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.961717 4840 log.go:25] "Validated CRI v1 runtime API"
Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.981468 4840 log.go:25] "Validated CRI v1 image API"
Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.982776 4840 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.985042 4840 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-05-14-54-32-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.985102 4840 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:41 fsType:tmpfs blockSize:0}]
Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.998583 4840 manager.go:217] Machine: {Timestamp:2025-12-05 14:58:41.997005252 +0000 UTC m=+0.338067886 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654120448 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:7c52e96b-a430-4f06-ad5e-bc57a22eeb52 BootID:a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc Filesystems:[{Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:41 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:26:36:9d Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:26:36:9d Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:7d:a1:9f Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:b9:af:f5 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:e5:23:a5 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:dd:0b:de Speed:-1 Mtu:1496} {Name:eth10 MacAddress:ba:1f:3f:17:05:ac Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:0a:d3:37:9d:00:e5 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654120448 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.998792 4840 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.998928 4840 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.999609 4840 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 05 14:58:41 crc kubenswrapper[4840]: I1205 14:58:41.999772 4840 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:41.999805 4840 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:41.999997 4840 topology_manager.go:138] "Creating topology manager with none policy"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.000005 4840 container_manager_linux.go:303] "Creating device plugin manager"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.000170 4840 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.000189 4840 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.000334 4840 state_mem.go:36] "Initialized new in-memory state store"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.000411 4840 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.002204 4840 kubelet.go:418] "Attempting to sync node with API server"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.002240 4840 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.002264 4840 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.002276 4840 kubelet.go:324] "Adding apiserver pod source"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.002304 4840 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 05 14:58:42 crc kubenswrapper[4840]: W1205 14:58:42.006894 4840 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.195:6443: connect: connection refused
Dec 05 14:58:42 crc kubenswrapper[4840]: E1205 14:58:42.007205 4840 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.195:6443: connect: connection refused" logger="UnhandledError"
Dec 05 14:58:42 crc kubenswrapper[4840]: W1205 14:58:42.007213 4840 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.195:6443: connect: connection refused
Dec 05 14:58:42 crc kubenswrapper[4840]: E1205 14:58:42.007259 4840 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.195:6443: connect: connection refused" logger="UnhandledError"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.007564 4840 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.008370 4840 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.009380 4840 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.009892 4840 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.009913 4840 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.009920 4840 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.009928 4840 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.009939 4840 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.009947 4840 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.009954 4840 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.009964 4840 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.009979 4840 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.009988 4840 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.009999 4840 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.010007 4840 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.010420 4840 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.010789 4840 server.go:1280] "Started kubelet"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.011253 4840 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.195:6443: connect: connection refused
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.011220 4840 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.011322 4840 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.012466 4840 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 05 14:58:42 crc systemd[1]: Started Kubernetes Kubelet.
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.012738 4840 server.go:460] "Adding debug handlers to kubelet server"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.013017 4840 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.013048 4840 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.013074 4840 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 13:47:38.307521924 +0000 UTC
Dec 05 14:58:42 crc kubenswrapper[4840]: E1205 14:58:42.013376 4840 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.013399 4840 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.013392 4840 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.013488 4840 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 05 14:58:42 crc kubenswrapper[4840]: E1205 14:58:42.013599 4840 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.195:6443: connect: connection refused" interval="200ms"
Dec 05 14:58:42 crc kubenswrapper[4840]: E1205 14:58:42.013070 4840 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.195:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187e59b14cf10df1 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 14:58:42.010762737 +0000 UTC m=+0.351825351,LastTimestamp:2025-12-05 14:58:42.010762737 +0000 UTC m=+0.351825351,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 05 14:58:42 crc kubenswrapper[4840]: W1205 14:58:42.016149 4840 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.195:6443: connect: connection refused
Dec 05 14:58:42 crc kubenswrapper[4840]: E1205 14:58:42.016219 4840 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.195:6443: connect: connection refused" logger="UnhandledError"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.017458 4840 factory.go:55] Registering systemd factory
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.017485 4840 factory.go:221] Registration of the systemd container factory successfully
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.017940 4840 factory.go:153] Registering CRI-O factory
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.017978 4840 factory.go:221] Registration of the crio container factory successfully
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.018069 4840 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.018102 4840 factory.go:103] Registering Raw factory
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.018167 4840 manager.go:1196] Started watching for new ooms in manager
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.019410 4840 manager.go:319] Starting recovery of all containers
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031466 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031569 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031584 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031598 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031615 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031631 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031644 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031657 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031672 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031716 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031729 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031744 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031757 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031772 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031787 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031801 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031818 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031830 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031842 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031852 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031881 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031894 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031920 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031934 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031945 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031964 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031979 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.031992 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.032004 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.032014 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.032027 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.032039 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.032052 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.032064 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.033888 4840 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.033940 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.033956 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.033968 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.033979 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034439 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034455 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034469 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034482 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034495 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034511 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034530 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034545 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034558 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034572 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034584 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034595 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034608 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034620 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034642 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034657 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034674 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034689 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034706 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034719 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034730 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034743 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034755 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034767 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034781 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034795 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034808 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034822 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034834 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034845 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034856 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.034886 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.035772 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.035786 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.035796 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.035807 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.035820 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036175 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036204 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036217 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036228 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036239 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036250 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036261 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036272 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036289 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036302 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036314 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036324 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036334 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036345 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036356 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036365 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036377 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036387 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036401 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036414 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036427 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036438 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036448 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036460 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036472 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036484 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036496 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036509 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036521 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036542 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036555 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036569 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036583 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036594 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036604 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036614 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036624 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036635 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036655 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036665 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036674 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036684 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036693 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036703 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036713 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036723 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036732 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036742 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036784 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036795 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036805 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036814 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036822 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036831 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036841 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036850 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.036859 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039121 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039174 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039188 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039200 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039211 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039221 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039236 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039247 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660"
volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039257 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039269 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039281 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039291 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039300 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039310 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039320 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039331 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039341 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039350 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039361 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" 
volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039374 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039385 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039396 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039406 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039416 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039431 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039442 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039452 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039462 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039475 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039490 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039501 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039513 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039528 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039543 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039561 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039572 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039588 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039601 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039611 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039623 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039633 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" 
volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039643 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039653 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039663 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039677 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039689 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.039700 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040101 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040124 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040134 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040144 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040180 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" 
volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040189 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040199 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040209 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040219 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040228 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040265 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040276 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040286 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040296 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040305 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040315 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040345 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040354 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040364 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040374 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040383 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040392 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040423 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040434 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040443 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040453 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040464 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040474 4840 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040500 4840 reconstruct.go:97] "Volume reconstruction finished" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.040510 4840 reconciler.go:26] "Reconciler: start to sync state" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.043745 4840 manager.go:324] Recovery completed Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.054015 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.055420 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.055465 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.055476 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.056190 4840 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.056207 4840 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.056227 4840 state_mem.go:36] "Initialized new in-memory state store" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.062942 4840 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.065390 4840 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.065444 4840 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.065479 4840 kubelet.go:2335] "Starting kubelet main sync loop" Dec 05 14:58:42 crc kubenswrapper[4840]: E1205 14:58:42.065520 4840 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.066024 4840 policy_none.go:49] "None policy: Start" Dec 05 14:58:42 crc kubenswrapper[4840]: W1205 14:58:42.066369 4840 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.195:6443: connect: connection refused Dec 05 14:58:42 crc kubenswrapper[4840]: E1205 14:58:42.066422 4840 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.195:6443: connect: connection refused" logger="UnhandledError" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.066897 4840 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.066924 4840 state_mem.go:35] "Initializing new in-memory state store" Dec 05 14:58:42 crc kubenswrapper[4840]: E1205 14:58:42.114523 4840 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.122043 4840 manager.go:334] "Starting Device Plugin manager" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.122328 4840 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.122370 4840 server.go:79] "Starting device plugin registration server" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.122797 4840 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.122812 4840 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.122939 4840 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.123037 4840 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.123047 4840 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 05 14:58:42 crc kubenswrapper[4840]: E1205 14:58:42.129880 4840 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.166346 4840 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 05 14:58:42 crc kubenswrapper[4840]: 
I1205 14:58:42.166454 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.167568 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.167596 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.167605 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.167715 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.167926 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.167975 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.168512 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.168538 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.168548 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.169291 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.169328 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.169387 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.170910 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.170959 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.170973 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.171419 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.171460 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.171478 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.171486 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.171504 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.171512 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.171610 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.171767 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.171798 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.172411 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.172442 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.172455 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.172566 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.172589 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.172598 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.172727 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.172889 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.172922 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.173590 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.173613 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.173622 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.173753 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.173791 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.173803 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.173807 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.173830 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.174436 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.174462 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.174473 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:42 crc kubenswrapper[4840]: E1205 14:58:42.214382 4840 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.195:6443: connect: connection refused" interval="400ms" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.224113 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.225412 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.225480 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.225504 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.225548 4840 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 14:58:42 crc kubenswrapper[4840]: E1205 14:58:42.226170 4840 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.195:6443: connect: connection refused" node="crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.242575 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.242632 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.242667 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.242895 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.242928 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.242950 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.242978 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.242997 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.243036 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.243061 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.243081 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.243246 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.243311 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.243335 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.243355 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345122 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345213 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345131 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345257 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345231 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345306 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345335 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345306 4840 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345396 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345377 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345400 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345357 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345521 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345554 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345585 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345590 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345620 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " 
pod="openshift-etcd/etcd-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345663 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345660 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345691 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345697 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345726 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345735 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345757 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345773 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345788 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345826 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: 
\"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345882 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345888 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.345957 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.426769 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.428225 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.428276 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.428287 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.428315 4840 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 14:58:42 crc kubenswrapper[4840]: E1205 14:58:42.428794 4840 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.195:6443: connect: connection refused" node="crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.496856 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.519063 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.539426 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.548964 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.553373 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 14:58:42 crc kubenswrapper[4840]: E1205 14:58:42.622540 4840 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.195:6443: connect: connection refused" interval="800ms" Dec 05 14:58:42 crc kubenswrapper[4840]: W1205 14:58:42.687126 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-5f7917be703015be90bb8fb04c6614392aa7f45f4cffabe09b115a54c1c9bb01 WatchSource:0}: Error finding container 5f7917be703015be90bb8fb04c6614392aa7f45f4cffabe09b115a54c1c9bb01: Status 404 returned error can't find the container with id 5f7917be703015be90bb8fb04c6614392aa7f45f4cffabe09b115a54c1c9bb01 Dec 05 14:58:42 crc kubenswrapper[4840]: W1205 14:58:42.687636 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-94a3bb00479bdae96aabc0594e770f60a0d44a60b15f321caa39f9e3b0627f91 WatchSource:0}: Error finding container 94a3bb00479bdae96aabc0594e770f60a0d44a60b15f321caa39f9e3b0627f91: Status 404 returned error can't find the container with id 94a3bb00479bdae96aabc0594e770f60a0d44a60b15f321caa39f9e3b0627f91 Dec 05 14:58:42 crc kubenswrapper[4840]: W1205 14:58:42.697596 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-e546c3b9e75a2d4853ef535888c15198cabfd221277566237b55c65fd1363cc0 WatchSource:0}: Error finding container e546c3b9e75a2d4853ef535888c15198cabfd221277566237b55c65fd1363cc0: Status 404 returned error can't find the container with id e546c3b9e75a2d4853ef535888c15198cabfd221277566237b55c65fd1363cc0 Dec 05 14:58:42 crc kubenswrapper[4840]: W1205 14:58:42.699479 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-9c44a0c91c121dd8bdc258973379ca3cc55420ae5b932d14f9a060598fe9cc4b WatchSource:0}: Error finding container 9c44a0c91c121dd8bdc258973379ca3cc55420ae5b932d14f9a060598fe9cc4b: Status 404 returned error can't find the container with id 9c44a0c91c121dd8bdc258973379ca3cc55420ae5b932d14f9a060598fe9cc4b Dec 05 14:58:42 crc kubenswrapper[4840]: W1205 14:58:42.700714 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-178a756c7dee8d6c910e07527ac4fe62801dbc08029b006a398a9f29d2461a5d WatchSource:0}: Error finding container 178a756c7dee8d6c910e07527ac4fe62801dbc08029b006a398a9f29d2461a5d: Status 404 returned error can't find the container with id 178a756c7dee8d6c910e07527ac4fe62801dbc08029b006a398a9f29d2461a5d Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.834698 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:42 crc kubenswrapper[4840]: W1205 14:58:42.835497 4840 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.195:6443: 
connect: connection refused Dec 05 14:58:42 crc kubenswrapper[4840]: E1205 14:58:42.835600 4840 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.195:6443: connect: connection refused" logger="UnhandledError" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.836370 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.836409 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.836418 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:42 crc kubenswrapper[4840]: I1205 14:58:42.836438 4840 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 14:58:42 crc kubenswrapper[4840]: E1205 14:58:42.836923 4840 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.195:6443: connect: connection refused" node="crc" Dec 05 14:58:42 crc kubenswrapper[4840]: W1205 14:58:42.999336 4840 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.195:6443: connect: connection refused Dec 05 14:58:42 crc kubenswrapper[4840]: E1205 14:58:42.999454 4840 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.195:6443: connect: connection refused" logger="UnhandledError" Dec 05 14:58:43 crc kubenswrapper[4840]: I1205 14:58:43.012823 4840 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.195:6443: connect: connection refused Dec 05 14:58:43 crc kubenswrapper[4840]: I1205 14:58:43.013924 4840 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 11:40:16.386768405 +0000 UTC Dec 05 14:58:43 crc kubenswrapper[4840]: I1205 14:58:43.070321 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"e546c3b9e75a2d4853ef535888c15198cabfd221277566237b55c65fd1363cc0"} Dec 05 14:58:43 crc kubenswrapper[4840]: I1205 14:58:43.071477 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"94a3bb00479bdae96aabc0594e770f60a0d44a60b15f321caa39f9e3b0627f91"} Dec 05 14:58:43 crc kubenswrapper[4840]: I1205 14:58:43.072385 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"5f7917be703015be90bb8fb04c6614392aa7f45f4cffabe09b115a54c1c9bb01"} Dec 05 14:58:43 crc kubenswrapper[4840]: I1205 14:58:43.073445 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"178a756c7dee8d6c910e07527ac4fe62801dbc08029b006a398a9f29d2461a5d"} Dec 05 14:58:43 crc kubenswrapper[4840]: I1205 14:58:43.074320 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9c44a0c91c121dd8bdc258973379ca3cc55420ae5b932d14f9a060598fe9cc4b"} Dec 05 14:58:43 crc kubenswrapper[4840]: W1205 14:58:43.193573 4840 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.195:6443: connect: connection refused Dec 05 14:58:43 crc kubenswrapper[4840]: E1205 14:58:43.193663 4840 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.195:6443: connect: connection refused" logger="UnhandledError" Dec 05 14:58:43 crc kubenswrapper[4840]: E1205 14:58:43.423917 4840 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.195:6443: connect: connection refused" interval="1.6s" Dec 05 14:58:43 crc kubenswrapper[4840]: W1205 14:58:43.563334 4840 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.195:6443: connect: connection refused Dec 05 14:58:43 crc kubenswrapper[4840]: E1205 14:58:43.563490 4840 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.195:6443: connect: connection refused" logger="UnhandledError" Dec 05 14:58:43 crc kubenswrapper[4840]: I1205 14:58:43.637466 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:43 crc kubenswrapper[4840]: I1205 14:58:43.638633 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:43 crc kubenswrapper[4840]: I1205 14:58:43.638662 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:43 crc kubenswrapper[4840]: I1205 14:58:43.638671 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:43 crc kubenswrapper[4840]: I1205 14:58:43.638690 4840 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 14:58:43 crc kubenswrapper[4840]: E1205 14:58:43.639122 4840 kubelet_node_status.go:99] "Unable to 
register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.195:6443: connect: connection refused" node="crc" Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.012594 4840 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.195:6443: connect: connection refused Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.014110 4840 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 21:39:32.187177009 +0000 UTC Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.078094 4840 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="7c7b98370b2f34f4e0e0f6b126da9d2be265612e6ffc8bb86fde3c7a26c4a5f3" exitCode=0 Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.078168 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"7c7b98370b2f34f4e0e0f6b126da9d2be265612e6ffc8bb86fde3c7a26c4a5f3"} Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.078260 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.081546 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.081599 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.081610 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.083549 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88"} Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.083585 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba"} Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.083600 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f"} Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.085340 4840 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5" exitCode=0 Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.085368 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5"} Dec 05 
14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.085388 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.086448 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.086479 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.086488 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.086893 4840 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7" exitCode=0 Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.086973 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7"} Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.087002 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.087659 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.087685 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.087697 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.088486 4840 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="936df7e95c0f2f0e07be977579a20a7745f3b5d720b8a689ee7d1ae82afb86d1" exitCode=0 Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.088523 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"936df7e95c0f2f0e07be977579a20a7745f3b5d720b8a689ee7d1ae82afb86d1"} Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.088600 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.089494 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.089540 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.089549 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.089815 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.090394 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.090416 4840 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:44 crc kubenswrapper[4840]: I1205 14:58:44.090425 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:44 crc kubenswrapper[4840]: W1205 14:58:44.733294 4840 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.195:6443: connect: connection refused Dec 05 14:58:44 crc kubenswrapper[4840]: E1205 14:58:44.733654 4840 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.195:6443: connect: connection refused" logger="UnhandledError" Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.012272 4840 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.195:6443: connect: connection refused Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.014551 4840 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 12:31:51.353036887 +0000 UTC Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.014619 4840 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 573h33m6.338422351s for next certificate rotation Dec 05 14:58:45 crc kubenswrapper[4840]: E1205 14:58:45.024364 4840 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.195:6443: connect: connection refused" interval="3.2s" Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.092956 4840 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="808689b257063969a18b97ec8e5577e5a4d159e318f43bfd6d210ef204cfad54" exitCode=0 Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.093070 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"808689b257063969a18b97ec8e5577e5a4d159e318f43bfd6d210ef204cfad54"} Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.093085 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.094392 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.094442 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.094453 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.096358 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.096954 4840 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"c876f3a0a8f04321a7811abc93079381e36b22b7757233f72a40ddc96858bed7"} Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.097597 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.097636 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.097653 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.101040 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e"} Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.101142 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.102040 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.102073 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.102082 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.103444 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"fb83899c7f1ab6ad9c37b9a9760c060049c4868ed04fcd1939553e569d05db12"} Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.103497 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"7586294d68d4dc8b5e9fd6ed9c807d9086d3c5b5690583b353e966af65b7ec2d"} Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.103510 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c057d4b05d7f3d5366be3d427eb21f50da128fafa496ba450352080fe1e93108"} Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.103595 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.104446 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.104472 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.104484 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.110157 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538"} Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.110188 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2"} Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.110200 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301"} Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.110211 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4"} Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.239633 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.245844 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.245891 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.245903 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:45 crc kubenswrapper[4840]: I1205 14:58:45.245951 4840 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 14:58:46 crc kubenswrapper[4840]: I1205 14:58:46.118354 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8"} Dec 05 14:58:46 crc kubenswrapper[4840]: I1205 14:58:46.118492 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:46 crc kubenswrapper[4840]: I1205 14:58:46.119751 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:46 crc kubenswrapper[4840]: I1205 14:58:46.119797 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:46 crc kubenswrapper[4840]: I1205 14:58:46.119811 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:46 crc kubenswrapper[4840]: I1205 14:58:46.120769 4840 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="e37d7251dd989488b5f8a285b1dd9a6c641a7cfd30132f10e2dd9509a3fe67ca" exitCode=0 Dec 05 14:58:46 crc kubenswrapper[4840]: I1205 14:58:46.120848 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"e37d7251dd989488b5f8a285b1dd9a6c641a7cfd30132f10e2dd9509a3fe67ca"} Dec 05 14:58:46 crc kubenswrapper[4840]: I1205 
14:58:46.120962 4840 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 14:58:46 crc kubenswrapper[4840]: I1205 14:58:46.120980 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:46 crc kubenswrapper[4840]: I1205 14:58:46.121004 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:46 crc kubenswrapper[4840]: I1205 14:58:46.121027 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:46 crc kubenswrapper[4840]: I1205 14:58:46.121210 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:46 crc kubenswrapper[4840]: I1205 14:58:46.121966 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:46 crc kubenswrapper[4840]: I1205 14:58:46.122000 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:46 crc kubenswrapper[4840]: I1205 14:58:46.122011 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:46 crc kubenswrapper[4840]: I1205 14:58:46.122552 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:46 crc kubenswrapper[4840]: I1205 14:58:46.122588 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:46 crc kubenswrapper[4840]: I1205 14:58:46.122606 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:46 crc kubenswrapper[4840]: I1205 14:58:46.122623 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:46 crc kubenswrapper[4840]: I1205 14:58:46.122649 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:46 crc kubenswrapper[4840]: I1205 14:58:46.122661 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:46 crc kubenswrapper[4840]: I1205 14:58:46.122624 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:46 crc kubenswrapper[4840]: I1205 14:58:46.122702 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:46 crc kubenswrapper[4840]: I1205 14:58:46.122711 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:47 crc kubenswrapper[4840]: I1205 14:58:47.127363 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"803e7f08982625206e2d374e95b097200e8c3a3963e861fe47d14a86f3e8993d"} Dec 05 14:58:47 crc kubenswrapper[4840]: I1205 14:58:47.127407 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c290c6bb071d9ce0895f274d5d328b651da6aa725b11d89869552839bd894ffe"} Dec 05 14:58:47 crc kubenswrapper[4840]: I1205 14:58:47.127418 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"59b575e2ef89738f66c3387a2063d2ffea7b472cf638a09b37f01b425a79f65d"} Dec 05 14:58:47 crc kubenswrapper[4840]: I1205 14:58:47.127429 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"eed1324ade5ab9af8572065fee2c84c272bb32491bfe9fcd83b392d6a5be2fb2"} Dec 05 14:58:47 crc kubenswrapper[4840]: I1205 14:58:47.127439 4840 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 14:58:47 crc kubenswrapper[4840]: I1205 14:58:47.127485 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:47 crc kubenswrapper[4840]: I1205 14:58:47.128421 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:47 crc kubenswrapper[4840]: I1205 14:58:47.128502 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:47 crc kubenswrapper[4840]: I1205 14:58:47.128535 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:47 crc kubenswrapper[4840]: I1205 14:58:47.634960 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 14:58:47 crc kubenswrapper[4840]: I1205 14:58:47.635721 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:47 crc kubenswrapper[4840]: I1205 14:58:47.637106 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:47 crc kubenswrapper[4840]: I1205 14:58:47.637162 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:47 crc kubenswrapper[4840]: I1205 14:58:47.637172 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:48 crc kubenswrapper[4840]: I1205 14:58:48.136027 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a1c51a8a9cc0e6a45bcda217ab18c6cf25f4d75212b23bfbae21a566266891fc"} Dec 05 14:58:48 crc kubenswrapper[4840]: I1205 14:58:48.136233 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:48 crc kubenswrapper[4840]: I1205 14:58:48.137734 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:48 crc kubenswrapper[4840]: I1205 14:58:48.137821 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:48 crc kubenswrapper[4840]: I1205 14:58:48.137835 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:48 crc kubenswrapper[4840]: I1205 14:58:48.725147 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 14:58:48 crc kubenswrapper[4840]: I1205 14:58:48.725349 4840 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 14:58:48 crc kubenswrapper[4840]: I1205 14:58:48.725396 4840 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:48 crc kubenswrapper[4840]: I1205 14:58:48.726679 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:48 crc kubenswrapper[4840]: I1205 14:58:48.726739 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:48 crc kubenswrapper[4840]: I1205 14:58:48.726762 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:49 crc kubenswrapper[4840]: I1205 14:58:49.139091 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:49 crc kubenswrapper[4840]: I1205 14:58:49.140304 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:49 crc kubenswrapper[4840]: I1205 14:58:49.140356 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:49 crc kubenswrapper[4840]: I1205 14:58:49.140365 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:49 crc kubenswrapper[4840]: I1205 14:58:49.883550 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 14:58:49 crc kubenswrapper[4840]: I1205 14:58:49.883696 4840 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 14:58:49 crc kubenswrapper[4840]: I1205 14:58:49.883733 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:49 crc kubenswrapper[4840]: I1205 14:58:49.885282 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:49 crc kubenswrapper[4840]: I1205 14:58:49.885343 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:49 crc kubenswrapper[4840]: I1205 14:58:49.885361 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:50 crc kubenswrapper[4840]: I1205 14:58:50.305200 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 05 14:58:50 crc kubenswrapper[4840]: I1205 14:58:50.305508 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:50 crc kubenswrapper[4840]: I1205 14:58:50.307086 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:50 crc kubenswrapper[4840]: I1205 14:58:50.307124 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:50 crc kubenswrapper[4840]: I1205 14:58:50.307142 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:50 crc kubenswrapper[4840]: I1205 14:58:50.697398 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 05 14:58:50 crc kubenswrapper[4840]: I1205 14:58:50.889475 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 14:58:50 crc kubenswrapper[4840]: I1205 
14:58:50.889675 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:50 crc kubenswrapper[4840]: I1205 14:58:50.890718 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:50 crc kubenswrapper[4840]: I1205 14:58:50.890752 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:50 crc kubenswrapper[4840]: I1205 14:58:50.890763 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:51 crc kubenswrapper[4840]: I1205 14:58:51.145491 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:51 crc kubenswrapper[4840]: I1205 14:58:51.146899 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:51 crc kubenswrapper[4840]: I1205 14:58:51.146951 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:51 crc kubenswrapper[4840]: I1205 14:58:51.146969 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:51 crc kubenswrapper[4840]: I1205 14:58:51.762795 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 14:58:51 crc kubenswrapper[4840]: I1205 14:58:51.763027 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:51 crc kubenswrapper[4840]: I1205 14:58:51.764108 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:51 crc kubenswrapper[4840]: I1205 14:58:51.764146 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:51 crc kubenswrapper[4840]: I1205 14:58:51.764160 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:52 crc kubenswrapper[4840]: I1205 14:58:52.033113 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 14:58:52 crc kubenswrapper[4840]: I1205 14:58:52.033422 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:52 crc kubenswrapper[4840]: I1205 14:58:52.035054 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:52 crc kubenswrapper[4840]: I1205 14:58:52.035107 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:52 crc kubenswrapper[4840]: I1205 14:58:52.035123 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:52 crc kubenswrapper[4840]: E1205 14:58:52.130026 4840 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 05 14:58:52 crc kubenswrapper[4840]: I1205 14:58:52.188452 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 14:58:52 crc kubenswrapper[4840]: I1205 14:58:52.188965 4840 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:52 crc kubenswrapper[4840]: I1205 14:58:52.190626 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:52 crc kubenswrapper[4840]: I1205 14:58:52.190713 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:52 crc kubenswrapper[4840]: I1205 14:58:52.190730 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:53 crc kubenswrapper[4840]: I1205 14:58:53.492099 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 14:58:53 crc kubenswrapper[4840]: I1205 14:58:53.492252 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:53 crc kubenswrapper[4840]: I1205 14:58:53.496571 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:53 crc kubenswrapper[4840]: I1205 14:58:53.496801 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:53 crc kubenswrapper[4840]: I1205 14:58:53.496917 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:53 crc kubenswrapper[4840]: I1205 14:58:53.499057 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 14:58:54 crc kubenswrapper[4840]: I1205 14:58:54.152987 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:54 crc kubenswrapper[4840]: I1205 14:58:54.154249 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:54 crc kubenswrapper[4840]: I1205 14:58:54.154298 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:54 crc kubenswrapper[4840]: I1205 14:58:54.154309 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:54 crc kubenswrapper[4840]: I1205 14:58:54.157403 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 14:58:55 crc kubenswrapper[4840]: I1205 14:58:55.155663 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:55 crc kubenswrapper[4840]: I1205 14:58:55.156810 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:55 crc kubenswrapper[4840]: I1205 14:58:55.156918 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:55 crc kubenswrapper[4840]: I1205 14:58:55.156932 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:55 crc kubenswrapper[4840]: I1205 14:58:55.188703 4840 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get 
\"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 14:58:55 crc kubenswrapper[4840]: I1205 14:58:55.188837 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 14:58:55 crc kubenswrapper[4840]: E1205 14:58:55.246893 4840 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Dec 05 14:58:55 crc kubenswrapper[4840]: W1205 14:58:55.916690 4840 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 05 14:58:55 crc kubenswrapper[4840]: I1205 14:58:55.916819 4840 trace.go:236] Trace[1222307857]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 14:58:45.914) (total time: 10002ms): Dec 05 14:58:55 crc kubenswrapper[4840]: Trace[1222307857]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10002ms (14:58:55.916) Dec 05 14:58:55 crc kubenswrapper[4840]: Trace[1222307857]: [10.002673598s] [10.002673598s] END Dec 05 14:58:55 crc kubenswrapper[4840]: E1205 14:58:55.916853 4840 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 05 14:58:55 crc kubenswrapper[4840]: W1205 14:58:55.942450 4840 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 05 14:58:55 crc kubenswrapper[4840]: I1205 14:58:55.942563 4840 trace.go:236] Trace[440034856]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 14:58:45.941) (total time: 10001ms): Dec 05 14:58:55 crc kubenswrapper[4840]: Trace[440034856]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (14:58:55.942) Dec 05 14:58:55 crc kubenswrapper[4840]: Trace[440034856]: [10.001189037s] [10.001189037s] END Dec 05 14:58:55 crc kubenswrapper[4840]: E1205 14:58:55.942590 4840 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 05 14:58:56 crc kubenswrapper[4840]: I1205 14:58:56.080746 4840 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" 
start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 05 14:58:56 crc kubenswrapper[4840]: I1205 14:58:56.080832 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 05 14:58:56 crc kubenswrapper[4840]: I1205 14:58:56.087147 4840 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 05 14:58:56 crc kubenswrapper[4840]: I1205 14:58:56.087269 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 05 14:58:58 crc kubenswrapper[4840]: I1205 14:58:58.448052 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:58 crc kubenswrapper[4840]: I1205 14:58:58.449455 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:58 crc kubenswrapper[4840]: I1205 14:58:58.449505 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:58 crc kubenswrapper[4840]: I1205 14:58:58.449517 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:58 crc kubenswrapper[4840]: I1205 14:58:58.449543 4840 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 14:58:58 crc kubenswrapper[4840]: E1205 14:58:58.453195 4840 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 05 14:58:58 crc kubenswrapper[4840]: I1205 14:58:58.727634 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 14:58:58 crc kubenswrapper[4840]: I1205 14:58:58.727798 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:58:58 crc kubenswrapper[4840]: I1205 14:58:58.728745 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:58 crc kubenswrapper[4840]: I1205 14:58:58.728780 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:58 crc kubenswrapper[4840]: I1205 14:58:58.728792 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:58 crc kubenswrapper[4840]: I1205 14:58:58.732290 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 14:58:59 crc kubenswrapper[4840]: I1205 14:58:59.166589 4840 kubelet_node_status.go:401] "Setting node annotation to enable 
volume controller attach/detach" Dec 05 14:58:59 crc kubenswrapper[4840]: I1205 14:58:59.167636 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:58:59 crc kubenswrapper[4840]: I1205 14:58:59.167712 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:58:59 crc kubenswrapper[4840]: I1205 14:58:59.167729 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:58:59 crc kubenswrapper[4840]: I1205 14:58:59.223014 4840 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 05 14:59:00 crc kubenswrapper[4840]: I1205 14:59:00.333340 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 05 14:59:00 crc kubenswrapper[4840]: I1205 14:59:00.333507 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:59:00 crc kubenswrapper[4840]: I1205 14:59:00.334416 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:00 crc kubenswrapper[4840]: I1205 14:59:00.334440 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:00 crc kubenswrapper[4840]: I1205 14:59:00.334449 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:00 crc kubenswrapper[4840]: I1205 14:59:00.344108 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 05 14:59:01 crc kubenswrapper[4840]: E1205 14:59:01.075545 4840 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Dec 05 14:59:01 crc kubenswrapper[4840]: I1205 14:59:01.078076 4840 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 05 14:59:01 crc kubenswrapper[4840]: I1205 14:59:01.081375 4840 trace.go:236] Trace[1257803071]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 14:58:46.103) (total time: 14977ms): Dec 05 14:59:01 crc kubenswrapper[4840]: Trace[1257803071]: ---"Objects listed" error: 14977ms (14:59:01.081) Dec 05 14:59:01 crc kubenswrapper[4840]: Trace[1257803071]: [14.977800185s] [14.977800185s] END Dec 05 14:59:01 crc kubenswrapper[4840]: I1205 14:59:01.081454 4840 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 05 14:59:01 crc kubenswrapper[4840]: I1205 14:59:01.082250 4840 trace.go:236] Trace[1660165061]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 14:58:50.971) (total time: 10110ms): Dec 05 14:59:01 crc kubenswrapper[4840]: Trace[1660165061]: ---"Objects listed" error: 10110ms (14:59:01.082) Dec 05 14:59:01 crc kubenswrapper[4840]: Trace[1660165061]: [10.110383316s] [10.110383316s] END Dec 05 14:59:01 crc kubenswrapper[4840]: I1205 14:59:01.082309 4840 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 05 14:59:01 crc kubenswrapper[4840]: I1205 14:59:01.171029 4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 14:59:01 crc 
kubenswrapper[4840]: I1205 14:59:01.172069 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:01 crc kubenswrapper[4840]: I1205 14:59:01.172131 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:01 crc kubenswrapper[4840]: I1205 14:59:01.172145 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:01 crc kubenswrapper[4840]: I1205 14:59:01.244110 4840 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:51206->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 05 14:59:01 crc kubenswrapper[4840]: I1205 14:59:01.244129 4840 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:51214->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 05 14:59:01 crc kubenswrapper[4840]: I1205 14:59:01.244179 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:51206->192.168.126.11:17697: read: connection reset by peer" Dec 05 14:59:01 crc kubenswrapper[4840]: I1205 14:59:01.244274 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:51214->192.168.126.11:17697: read: connection reset by peer" Dec 05 14:59:01 crc kubenswrapper[4840]: I1205 14:59:01.244793 4840 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 05 14:59:01 crc kubenswrapper[4840]: I1205 14:59:01.244929 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 05 14:59:01 crc kubenswrapper[4840]: I1205 14:59:01.453148 4840 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 05 14:59:01 crc kubenswrapper[4840]: I1205 14:59:01.763689 4840 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 05 14:59:01 crc kubenswrapper[4840]: I1205 14:59:01.763782 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.116859 4840 apiserver.go:52] "Watching apiserver" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.174570 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.176306 4840 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8" exitCode=255 Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.176340 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8"} Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.211936 4840 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.212377 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-ovn-kubernetes/ovnkube-node-czvxk","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-multus/multus-additional-cni-plugins-nt6vw","openshift-multus/multus-q8pn7","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-dns/node-resolver-cbq2s","openshift-machine-config-operator/machine-config-daemon-xxvfs"] Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.212718 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.212851 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.212956 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.212953 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.213010 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.213827 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.213950 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.214262 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.214201 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.214523 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.214605 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.214888 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.215155 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.215304 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-cbq2s" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.216638 4840 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.222522 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.222896 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.224649 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.224919 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.225336 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.225582 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.225886 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.226004 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.226142 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.226327 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.226353 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.226519 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.226718 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.228551 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.228665 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.228904 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.228972 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.230286 4840 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.232134 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.232313 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.232540 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.232908 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.236003 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.236268 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.236372 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.236588 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.236649 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.236735 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.237243 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.242148 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.242208 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.242356 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.243028 4840 scope.go:117] "RemoveContainer" containerID="53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.244134 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.245744 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.254786 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.274060 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.284065 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.284248 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.284370 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.284490 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.284585 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.284698 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.284789 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.284933 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.285036 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.285176 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.285658 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.286088 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.286229 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.286317 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.286405 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.286644 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.286741 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.286833 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.286955 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.287054 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.287147 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.287231 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.287366 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.287456 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.287553 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.287646 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.287736 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.287824 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.287943 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.288544 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.289471 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.289591 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.289686 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.289795 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.289934 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.290040 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.290139 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.290227 4840 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.290322 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.290442 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.290544 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.290633 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.290725 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.290946 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.291314 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.291356 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.291376 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.291441 4840 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.291461 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.291481 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.291506 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.291524 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.291542 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.291558 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.291576 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.291597 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.291613 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 
14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.291633 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.291651 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.291671 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.285310 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.285308 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.285588 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.285902 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.285910 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.285925 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.286038 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.286276 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.286685 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.288287 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.288323 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.288339 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.288451 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.288653 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.288710 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.288790 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.288808 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.289151 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.289256 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.289500 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.289733 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.291962 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.290055 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.290295 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.290368 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.290891 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.291179 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.291435 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.291518 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.291681 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.291688 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.289940 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292066 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292076 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292223 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292282 4840 operation_generator.go:803] UnmountVolume.TearDown
succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.291691 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292430 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292440 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292458 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292478 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292501 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292520 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292542 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292584 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292605 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292625 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292645 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292666 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292687 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292706 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292612 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292727 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292731 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292782 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292816 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292842 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.292967 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.293190 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.293429 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85".
PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.293690 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.293810 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294217 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294252 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294284 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294318 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294345 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294370 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294396 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294427 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294457 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294489 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294520 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294557 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294589 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294621 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294656 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294690 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294721 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294751 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294782 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294815 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294843 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295000 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295046 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295079 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295114 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295153 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295188 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295213 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295237 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295259 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295282 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295356 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295392 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295426 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295456 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295487 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295515 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205
14:59:02.295546 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295576 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295610 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295643 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295671 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295699 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295728 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295757 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295789 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295820 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295850 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295897 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295926 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295956 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.295987 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296009 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296036 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296063 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296087 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296117 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296152 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296186 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296215 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296241 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296300 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296326 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296365 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296395 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296428 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296464 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296495 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296528 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296561 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296600 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296625 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296659 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296692 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296716 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296741 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296764 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\"
(UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296788 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296810 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296836 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296859 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296902 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296926 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296948 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296971 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296997 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297021 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297045 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297068 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297092 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297117 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297151 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297188 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297225 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297267 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297305 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297338 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297370 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297470 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297504 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297538 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297570 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297604 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297641 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297674 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297705 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297738 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297768 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297792 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297815 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297839 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297888 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297915 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297938 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297962 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298027 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/78a40d7a-9ba7-4a35-8263-6faf0ca9d52e-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-nt6vw\" (UID: \"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\") "
pod="openshift-multus/multus-additional-cni-plugins-nt6vw"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298060 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ck74c\" (UniqueName: \"kubernetes.io/projected/a972c8d4-fbab-487f-a2b7-782c3195d1ef-kube-api-access-ck74c\") pod \"machine-config-daemon-xxvfs\" (UID: \"a972c8d4-fbab-487f-a2b7-782c3195d1ef\") " pod="openshift-machine-config-operator/machine-config-daemon-xxvfs"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298087 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-systemd-units\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298114 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-run-netns\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298140 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-cni-bin\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298163 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e9a432c2-725d-46c6-963e-68a99ba35c89-env-overrides\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298186 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/78a40d7a-9ba7-4a35-8263-6faf0ca9d52e-tuning-conf-dir\") pod \"multus-additional-cni-plugins-nt6vw\" (UID: \"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\") " pod="openshift-multus/multus-additional-cni-plugins-nt6vw"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298208 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-host-run-netns\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298230 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-host-var-lib-cni-multus\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298252 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-host-run-multus-certs\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298276 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e9a432c2-725d-46c6-963e-68a99ba35c89-ovnkube-script-lib\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298299 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-slash\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298321 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/ca450a54-be29-4a30-9f3c-672b824176e6-hosts-file\") pod \"node-resolver-cbq2s\" (UID: \"ca450a54-be29-4a30-9f3c-672b824176e6\") " pod="openshift-dns/node-resolver-cbq2s"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298343 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a972c8d4-fbab-487f-a2b7-782c3195d1ef-proxy-tls\") pod \"machine-config-daemon-xxvfs\" (UID: \"a972c8d4-fbab-487f-a2b7-782c3195d1ef\") " pod="openshift-machine-config-operator/machine-config-daemon-xxvfs"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298366 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a972c8d4-fbab-487f-a2b7-782c3195d1ef-mcd-auth-proxy-config\") pod \"machine-config-daemon-xxvfs\" (UID: \"a972c8d4-fbab-487f-a2b7-782c3195d1ef\") " pod="openshift-machine-config-operator/machine-config-daemon-xxvfs"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298386 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-kubelet\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298405 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-run-systemd\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298427 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/ffd91a64-4156-418d-8348-1efa3563e904-cni-binary-copy\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298447 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-hostroot\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298468 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-etc-kubernetes\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298490 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/a972c8d4-fbab-487f-a2b7-782c3195d1ef-rootfs\") pod \"machine-config-daemon-xxvfs\" (UID: \"a972c8d4-fbab-487f-a2b7-782c3195d1ef\") " pod="openshift-machine-config-operator/machine-config-daemon-xxvfs"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298513 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbq6d\" (UniqueName: \"kubernetes.io/projected/e9a432c2-725d-46c6-963e-68a99ba35c89-kube-api-access-jbq6d\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298546 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298574 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298599 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-multus-conf-dir\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298624 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-etc-openvswitch\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk"
Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298646 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e9a432c2-725d-46c6-963e-68a99ba35c89-ovnkube-config\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk"
Dec 05 14:59:02 crc
kubenswrapper[4840]: I1205 14:59:02.298668 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcpsz\" (UniqueName: \"kubernetes.io/projected/78a40d7a-9ba7-4a35-8263-6faf0ca9d52e-kube-api-access-dcpsz\") pod \"multus-additional-cni-plugins-nt6vw\" (UID: \"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\") " pod="openshift-multus/multus-additional-cni-plugins-nt6vw" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298697 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298719 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-run-openvswitch\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298742 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/78a40d7a-9ba7-4a35-8263-6faf0ca9d52e-system-cni-dir\") pod \"multus-additional-cni-plugins-nt6vw\" (UID: \"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\") " pod="openshift-multus/multus-additional-cni-plugins-nt6vw" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298768 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298795 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298819 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-run-ovn\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298843 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/78a40d7a-9ba7-4a35-8263-6faf0ca9d52e-os-release\") pod \"multus-additional-cni-plugins-nt6vw\" (UID: \"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\") " pod="openshift-multus/multus-additional-cni-plugins-nt6vw" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298905 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: 
\"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-cnibin\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298928 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-host-run-k8s-cni-cncf-io\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298952 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-host-var-lib-cni-bin\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298982 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299005 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-node-log\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299028 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-run-ovn-kubernetes\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299050 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cr9mt\" (UniqueName: \"kubernetes.io/projected/ca450a54-be29-4a30-9f3c-672b824176e6-kube-api-access-cr9mt\") pod \"node-resolver-cbq2s\" (UID: \"ca450a54-be29-4a30-9f3c-672b824176e6\") " pod="openshift-dns/node-resolver-cbq2s" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299073 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/ffd91a64-4156-418d-8348-1efa3563e904-multus-daemon-config\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299095 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-multus-socket-dir-parent\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299125 4840 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299148 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299175 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299198 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e9a432c2-725d-46c6-963e-68a99ba35c89-ovn-node-metrics-cert\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299221 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-multus-cni-dir\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299242 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-os-release\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299266 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-var-lib-openvswitch\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299288 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-log-socket\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299335 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/78a40d7a-9ba7-4a35-8263-6faf0ca9d52e-cnibin\") pod \"multus-additional-cni-plugins-nt6vw\" (UID: \"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\") " pod="openshift-multus/multus-additional-cni-plugins-nt6vw" Dec 05 
14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299358 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-host-var-lib-kubelet\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299379 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-system-cni-dir\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299405 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299430 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299456 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299482 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/78a40d7a-9ba7-4a35-8263-6faf0ca9d52e-cni-binary-copy\") pod \"multus-additional-cni-plugins-nt6vw\" (UID: \"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\") " pod="openshift-multus/multus-additional-cni-plugins-nt6vw" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299503 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299532 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299562 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: 
\"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299586 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-cni-netd\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299608 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9z85z\" (UniqueName: \"kubernetes.io/projected/ffd91a64-4156-418d-8348-1efa3563e904-kube-api-access-9z85z\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299691 4840 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299708 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299725 4840 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299741 4840 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299756 4840 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299772 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299800 4840 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299820 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299841 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 
14:59:02.299861 4840 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299934 4840 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299950 4840 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299981 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299995 4840 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300010 4840 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300026 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300040 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300055 4840 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300070 4840 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300085 4840 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300098 4840 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300112 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300129 4840 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300144 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300160 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300174 4840 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300188 4840 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300204 4840 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300219 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300233 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300248 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300261 4840 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300275 4840 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300290 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300304 4840 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 
14:59:02.300320 4840 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300334 4840 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300349 4840 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.302480 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.293773 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.302563 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.293884 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294042 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294132 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294345 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.294481 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296072 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296365 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296486 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296565 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.296988 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297516 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.297982 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.298432 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299101 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299385 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299401 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.299421 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300327 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300648 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.300764 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.301299 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.301325 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.301929 4840 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.302859 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 14:59:02.802839533 +0000 UTC m=+21.143902157 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.302954 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.302034 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.302491 4840 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.303073 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 14:59:02.8030625 +0000 UTC m=+21.144125134 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.302554 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.303235 4840 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.303565 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.303614 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.303802 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.304113 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.304138 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.304172 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.305191 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.305479 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.306004 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.306473 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.306531 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.307260 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.307352 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.307436 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.307461 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.307499 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.307371 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.309041 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.309240 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.313219 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.313390 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.319487 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.319518 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.319535 4840 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.319604 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 14:59:02.819580545 +0000 UTC m=+21.160643239 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.321415 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.321458 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.322222 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.322328 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.322485 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.322359 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.322557 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.336001 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.336232 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.338050 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.338836 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.339255 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.341609 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.342839 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.343304 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.343607 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.344253 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.344436 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.344451 4840 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.344508 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 14:59:02.844486191 +0000 UTC m=+21.185548805 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.344707 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.345365 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.347741 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.348153 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.349149 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.349272 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.349529 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.349807 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.350166 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.350234 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.350310 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.351734 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.351784 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.351884 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.352004 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.352177 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.352338 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.352349 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.350572 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.350788 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.351014 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.352510 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.351224 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.351557 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.352553 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.353083 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.353307 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.353582 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.353611 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.353791 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.354260 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.355572 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.355828 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.359929 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.360213 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.360515 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.360629 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.360702 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.361142 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.361275 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.361741 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.362314 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.362302 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.362549 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.362598 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.362666 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.363200 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.363407 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). 
InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.363472 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.364254 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.364276 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.364442 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.364499 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.364514 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.364728 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.365122 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.365587 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.365944 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.366283 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.366609 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.366900 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.367151 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.367651 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.367805 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.368065 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.368233 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.368288 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.368492 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.368521 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.368758 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.368979 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.369307 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.369759 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.370463 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.370786 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.370996 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.371099 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.371371 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.371964 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.372181 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.372252 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.372197 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.372302 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.372607 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.372364 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.350417 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.372851 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 14:59:02.872819722 +0000 UTC m=+21.213882336 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.372931 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.372816 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.373646 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.373758 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.376808 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.376929 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.377363 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.378844 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.380805 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.388216 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.393667 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.395400 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.397583 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\
\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.404838 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-os-release\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.404898 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-multus-socket-dir-parent\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.404932 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.404951 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e9a432c2-725d-46c6-963e-68a99ba35c89-ovn-node-metrics-cert\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.404968 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-multus-cni-dir\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.404988 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-var-lib-openvswitch\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.405063 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-log-socket\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 
crc kubenswrapper[4840]: I1205 14:59:02.405084 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/78a40d7a-9ba7-4a35-8263-6faf0ca9d52e-cnibin\") pod \"multus-additional-cni-plugins-nt6vw\" (UID: \"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\") " pod="openshift-multus/multus-additional-cni-plugins-nt6vw" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.405106 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-host-var-lib-kubelet\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.405107 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-os-release\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.405129 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-system-cni-dir\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.405201 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-system-cni-dir\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.405285 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.405329 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/78a40d7a-9ba7-4a35-8263-6faf0ca9d52e-cni-binary-copy\") pod \"multus-additional-cni-plugins-nt6vw\" (UID: \"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\") " pod="openshift-multus/multus-additional-cni-plugins-nt6vw" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.405365 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.405392 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-multus-socket-dir-parent\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.405423 4840 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-cni-netd\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.405440 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.405457 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9z85z\" (UniqueName: \"kubernetes.io/projected/ffd91a64-4156-418d-8348-1efa3563e904-kube-api-access-9z85z\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.405496 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/78a40d7a-9ba7-4a35-8263-6faf0ca9d52e-tuning-conf-dir\") pod \"multus-additional-cni-plugins-nt6vw\" (UID: \"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\") " pod="openshift-multus/multus-additional-cni-plugins-nt6vw" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.405527 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/78a40d7a-9ba7-4a35-8263-6faf0ca9d52e-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-nt6vw\" (UID: \"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\") " pod="openshift-multus/multus-additional-cni-plugins-nt6vw" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.405559 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ck74c\" (UniqueName: \"kubernetes.io/projected/a972c8d4-fbab-487f-a2b7-782c3195d1ef-kube-api-access-ck74c\") pod \"machine-config-daemon-xxvfs\" (UID: \"a972c8d4-fbab-487f-a2b7-782c3195d1ef\") " pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.405594 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-systemd-units\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.405627 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-run-netns\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.405684 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-log-socket\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.405715 4840 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.405852 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-multus-cni-dir\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.405903 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-var-lib-openvswitch\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.405994 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.406047 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-host-var-lib-kubelet\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.406053 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/78a40d7a-9ba7-4a35-8263-6faf0ca9d52e-cnibin\") pod \"multus-additional-cni-plugins-nt6vw\" (UID: \"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\") " pod="openshift-multus/multus-additional-cni-plugins-nt6vw" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.406079 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-cni-netd\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.406494 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-systemd-units\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.406628 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-run-netns\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.406757 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/78a40d7a-9ba7-4a35-8263-6faf0ca9d52e-cni-binary-copy\") pod \"multus-additional-cni-plugins-nt6vw\" (UID: \"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\") " pod="openshift-multus/multus-additional-cni-plugins-nt6vw" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.406920 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-cni-bin\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.406968 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-cni-bin\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.407320 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e9a432c2-725d-46c6-963e-68a99ba35c89-env-overrides\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.407598 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/78a40d7a-9ba7-4a35-8263-6faf0ca9d52e-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-nt6vw\" (UID: \"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\") " pod="openshift-multus/multus-additional-cni-plugins-nt6vw" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.408118 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e9a432c2-725d-46c6-963e-68a99ba35c89-env-overrides\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.408264 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-host-run-netns\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.408341 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-host-run-netns\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.408295 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-host-var-lib-cni-multus\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.408406 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-host-run-multus-certs\") pod \"multus-q8pn7\" 
(UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.408432 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-host-var-lib-cni-multus\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.408470 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e9a432c2-725d-46c6-963e-68a99ba35c89-ovnkube-script-lib\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.408516 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-host-run-multus-certs\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.408530 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-slash\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.408605 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-slash\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.408639 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/ca450a54-be29-4a30-9f3c-672b824176e6-hosts-file\") pod \"node-resolver-cbq2s\" (UID: \"ca450a54-be29-4a30-9f3c-672b824176e6\") " pod="openshift-dns/node-resolver-cbq2s" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.408662 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-etc-kubernetes\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.408738 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/ca450a54-be29-4a30-9f3c-672b824176e6-hosts-file\") pod \"node-resolver-cbq2s\" (UID: \"ca450a54-be29-4a30-9f3c-672b824176e6\") " pod="openshift-dns/node-resolver-cbq2s" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.408775 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a972c8d4-fbab-487f-a2b7-782c3195d1ef-proxy-tls\") pod \"machine-config-daemon-xxvfs\" (UID: \"a972c8d4-fbab-487f-a2b7-782c3195d1ef\") " pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.408790 4840 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-etc-kubernetes\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.408825 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a972c8d4-fbab-487f-a2b7-782c3195d1ef-mcd-auth-proxy-config\") pod \"machine-config-daemon-xxvfs\" (UID: \"a972c8d4-fbab-487f-a2b7-782c3195d1ef\") " pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.409472 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e9a432c2-725d-46c6-963e-68a99ba35c89-ovnkube-script-lib\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.409714 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a972c8d4-fbab-487f-a2b7-782c3195d1ef-mcd-auth-proxy-config\") pod \"machine-config-daemon-xxvfs\" (UID: \"a972c8d4-fbab-487f-a2b7-782c3195d1ef\") " pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.408852 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-kubelet\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.409781 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-run-systemd\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.409808 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/ffd91a64-4156-418d-8348-1efa3563e904-cni-binary-copy\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.409834 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-hostroot\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.409857 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/a972c8d4-fbab-487f-a2b7-782c3195d1ef-rootfs\") pod \"machine-config-daemon-xxvfs\" (UID: \"a972c8d4-fbab-487f-a2b7-782c3195d1ef\") " pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.409957 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbq6d\" (UniqueName: 
\"kubernetes.io/projected/e9a432c2-725d-46c6-963e-68a99ba35c89-kube-api-access-jbq6d\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.409996 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-multus-conf-dir\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.410024 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-etc-openvswitch\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.410045 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e9a432c2-725d-46c6-963e-68a99ba35c89-ovnkube-config\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.410068 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcpsz\" (UniqueName: \"kubernetes.io/projected/78a40d7a-9ba7-4a35-8263-6faf0ca9d52e-kube-api-access-dcpsz\") pod \"multus-additional-cni-plugins-nt6vw\" (UID: \"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\") " pod="openshift-multus/multus-additional-cni-plugins-nt6vw" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.410108 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-run-openvswitch\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.410131 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/78a40d7a-9ba7-4a35-8263-6faf0ca9d52e-system-cni-dir\") pod \"multus-additional-cni-plugins-nt6vw\" (UID: \"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\") " pod="openshift-multus/multus-additional-cni-plugins-nt6vw" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.410177 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-host-var-lib-cni-bin\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.410220 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-run-ovn\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.410244 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: 
\"kubernetes.io/host-path/78a40d7a-9ba7-4a35-8263-6faf0ca9d52e-os-release\") pod \"multus-additional-cni-plugins-nt6vw\" (UID: \"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\") " pod="openshift-multus/multus-additional-cni-plugins-nt6vw" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.410270 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-cnibin\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.410293 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-host-run-k8s-cni-cncf-io\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.410326 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-node-log\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.410352 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-run-ovn-kubernetes\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.410378 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cr9mt\" (UniqueName: \"kubernetes.io/projected/ca450a54-be29-4a30-9f3c-672b824176e6-kube-api-access-cr9mt\") pod \"node-resolver-cbq2s\" (UID: \"ca450a54-be29-4a30-9f3c-672b824176e6\") " pod="openshift-dns/node-resolver-cbq2s" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.410401 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/ffd91a64-4156-418d-8348-1efa3563e904-multus-daemon-config\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.410420 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/ffd91a64-4156-418d-8348-1efa3563e904-cni-binary-copy\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.410559 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-run-openvswitch\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.410716 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-run-systemd\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") 
" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.410750 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/78a40d7a-9ba7-4a35-8263-6faf0ca9d52e-system-cni-dir\") pod \"multus-additional-cni-plugins-nt6vw\" (UID: \"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\") " pod="openshift-multus/multus-additional-cni-plugins-nt6vw" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.410775 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-hostroot\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.410807 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/a972c8d4-fbab-487f-a2b7-782c3195d1ef-rootfs\") pod \"machine-config-daemon-xxvfs\" (UID: \"a972c8d4-fbab-487f-a2b7-782c3195d1ef\") " pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.410921 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-kubelet\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.411055 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/ffd91a64-4156-418d-8348-1efa3563e904-multus-daemon-config\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.411161 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/78a40d7a-9ba7-4a35-8263-6faf0ca9d52e-tuning-conf-dir\") pod \"multus-additional-cni-plugins-nt6vw\" (UID: \"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\") " pod="openshift-multus/multus-additional-cni-plugins-nt6vw" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.411300 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-run-ovn\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.411330 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-multus-conf-dir\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.411389 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-cnibin\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.411412 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" 
(UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-host-var-lib-cni-bin\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.411455 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/78a40d7a-9ba7-4a35-8263-6faf0ca9d52e-os-release\") pod \"multus-additional-cni-plugins-nt6vw\" (UID: \"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\") " pod="openshift-multus/multus-additional-cni-plugins-nt6vw" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.411466 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-run-ovn-kubernetes\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.411473 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-node-log\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.411491 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/ffd91a64-4156-418d-8348-1efa3563e904-host-run-k8s-cni-cncf-io\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.411498 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-etc-openvswitch\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.411531 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.412671 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a972c8d4-fbab-487f-a2b7-782c3195d1ef-proxy-tls\") pod \"machine-config-daemon-xxvfs\" (UID: \"a972c8d4-fbab-487f-a2b7-782c3195d1ef\") " pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.412935 4840 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.412962 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.412987 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.413005 4840 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.413026 4840 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.413044 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.413063 4840 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.413159 4840 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415147 4840 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415165 4840 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415179 4840 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415195 4840 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415236 4840 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415249 4840 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415262 4840 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415277 4840 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415314 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415327 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415339 4840 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415350 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415411 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415426 4840 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415441 4840 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415453 4840 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415489 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415504 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415518 4840 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415536 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415576 4840 
reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415588 4840 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415601 4840 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415614 4840 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415651 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415668 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415747 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415762 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415776 4840 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415790 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415828 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415841 4840 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415857 4840 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415901 4840 
reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415915 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415927 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415940 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415979 4840 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.415992 4840 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416004 4840 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416017 4840 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416028 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e9a432c2-725d-46c6-963e-68a99ba35c89-ovnkube-config\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416055 4840 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416164 4840 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416178 4840 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416216 4840 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416235 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416249 4840 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416261 4840 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416297 4840 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416319 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416331 4840 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416344 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416355 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416393 4840 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416405 4840 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416420 4840 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416434 4840 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416472 4840 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416484 4840 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416496 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416509 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416544 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416574 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416587 4840 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416598 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416636 4840 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416649 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416661 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416674 4840 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416715 4840 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416729 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416743 4840 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416756 4840 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416792 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416807 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416824 4840 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416837 4840 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416884 4840 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416900 4840 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416913 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416926 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416938 4840 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416977 4840 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.416990 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: 
\"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417004 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417019 4840 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417058 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417072 4840 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417087 4840 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417102 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417141 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417155 4840 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417168 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417179 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417228 4840 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417242 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417257 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: 
\"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417294 4840 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417309 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417322 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417337 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417375 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417392 4840 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417406 4840 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417419 4840 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417432 4840 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417472 4840 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417485 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417497 4840 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417511 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on 
node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417557 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417571 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417583 4840 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417596 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417635 4840 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417648 4840 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417661 4840 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417674 4840 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417712 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417727 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417740 4840 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417752 4840 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417791 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: 
\"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417807 4840 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417819 4840 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417831 4840 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417844 4840 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417896 4840 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417912 4840 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417924 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417937 4840 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417949 4840 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.417988 4840 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.418002 4840 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.418015 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.418026 4840 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.418038 4840 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.418051 4840 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.418064 4840 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.418079 4840 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.418092 4840 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.418142 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.418157 4840 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.418168 4840 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.418180 4840 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.418191 4840 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.418579 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.420370 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e9a432c2-725d-46c6-963e-68a99ba35c89-ovn-node-metrics-cert\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.423089 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9z85z\" (UniqueName: \"kubernetes.io/projected/ffd91a64-4156-418d-8348-1efa3563e904-kube-api-access-9z85z\") pod \"multus-q8pn7\" (UID: \"ffd91a64-4156-418d-8348-1efa3563e904\") " pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.424218 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ck74c\" (UniqueName: \"kubernetes.io/projected/a972c8d4-fbab-487f-a2b7-782c3195d1ef-kube-api-access-ck74c\") pod \"machine-config-daemon-xxvfs\" (UID: \"a972c8d4-fbab-487f-a2b7-782c3195d1ef\") " pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.425200 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.433299 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcpsz\" (UniqueName: \"kubernetes.io/projected/78a40d7a-9ba7-4a35-8263-6faf0ca9d52e-kube-api-access-dcpsz\") pod \"multus-additional-cni-plugins-nt6vw\" (UID: \"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\") " pod="openshift-multus/multus-additional-cni-plugins-nt6vw" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.435354 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbq6d\" (UniqueName: \"kubernetes.io/projected/e9a432c2-725d-46c6-963e-68a99ba35c89-kube-api-access-jbq6d\") pod \"ovnkube-node-czvxk\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.437671 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.438014 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cr9mt\" (UniqueName: \"kubernetes.io/projected/ca450a54-be29-4a30-9f3c-672b824176e6-kube-api-access-cr9mt\") pod \"node-resolver-cbq2s\" (UID: \"ca450a54-be29-4a30-9f3c-672b824176e6\") " pod="openshift-dns/node-resolver-cbq2s" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.448770 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.463149 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.476344 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.492054 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.505011 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.515368 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.518664 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.529110 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 14:59:02 crc kubenswrapper[4840]: W1205 14:59:02.543968 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-5e526d411742739ee5a7fb64b9ba82744145f59b35c956a4ef7db51f2c60bca2 WatchSource:0}: Error finding container 5e526d411742739ee5a7fb64b9ba82744145f59b35c956a4ef7db51f2c60bca2: Status 404 returned error can't find the container with id 5e526d411742739ee5a7fb64b9ba82744145f59b35c956a4ef7db51f2c60bca2 Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.556442 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.567064 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.572772 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 14:59:02 crc kubenswrapper[4840]: W1205 14:59:02.579067 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda972c8d4_fbab_487f_a2b7_782c3195d1ef.slice/crio-d97f5f61794b4f0820ef62c2536a8ea7c24ef4d5a533364faf0ee71d8af1cf41 WatchSource:0}: Error finding container d97f5f61794b4f0820ef62c2536a8ea7c24ef4d5a533364faf0ee71d8af1cf41: Status 404 returned error can't find the container with id d97f5f61794b4f0820ef62c2536a8ea7c24ef4d5a533364faf0ee71d8af1cf41 Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.583494 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.608459 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.615710 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-q8pn7" Dec 05 14:59:02 crc kubenswrapper[4840]: W1205 14:59:02.625644 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode9a432c2_725d_46c6_963e_68a99ba35c89.slice/crio-35d579b74fcbf6e4655624c42ca06f576717bffd48f5e68405c791110e95dc85 WatchSource:0}: Error finding container 35d579b74fcbf6e4655624c42ca06f576717bffd48f5e68405c791110e95dc85: Status 404 returned error can't find the container with id 35d579b74fcbf6e4655624c42ca06f576717bffd48f5e68405c791110e95dc85 Dec 05 14:59:02 crc kubenswrapper[4840]: W1205 14:59:02.654543 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod78a40d7a_9ba7_4a35_8263_6faf0ca9d52e.slice/crio-f63547b0f7b8fc45c4acc87fcd246d4aaaf7a8ea6e774218e852f2beb9090688 WatchSource:0}: Error finding container f63547b0f7b8fc45c4acc87fcd246d4aaaf7a8ea6e774218e852f2beb9090688: Status 404 returned error can't find the container with id f63547b0f7b8fc45c4acc87fcd246d4aaaf7a8ea6e774218e852f2beb9090688 Dec 05 14:59:02 crc kubenswrapper[4840]: W1205 14:59:02.657899 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podffd91a64_4156_418d_8348_1efa3563e904.slice/crio-42998faf6374bbdaf911324411296ad2c80342e4339589329b0c699179e51945 WatchSource:0}: Error finding container 42998faf6374bbdaf911324411296ad2c80342e4339589329b0c699179e51945: Status 404 returned error can't find the container with id 42998faf6374bbdaf911324411296ad2c80342e4339589329b0c699179e51945 Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.677488 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-cbq2s" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.820581 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.820640 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.820671 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.820833 4840 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.820903 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 14:59:03.820887722 +0000 UTC m=+22.161950346 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.820975 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.820995 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.821008 4840 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.821073 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-12-05 14:59:03.821059937 +0000 UTC m=+22.162122561 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.821113 4840 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.821244 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 14:59:03.821138179 +0000 UTC m=+22.162200803 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.921531 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 14:59:02 crc kubenswrapper[4840]: I1205 14:59:02.921669 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.921887 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.921912 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.921926 4840 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.921978 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 14:59:03.921961958 +0000 UTC m=+22.263024572 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 14:59:02 crc kubenswrapper[4840]: E1205 14:59:02.922070 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 14:59:03.92204123 +0000 UTC m=+22.263103844 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.181145 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-cbq2s" event={"ID":"ca450a54-be29-4a30-9f3c-672b824176e6","Type":"ContainerStarted","Data":"df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9"} Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.181746 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-cbq2s" event={"ID":"ca450a54-be29-4a30-9f3c-672b824176e6","Type":"ContainerStarted","Data":"f10f61bb1928a0aebaf7da63f3ef1e6f4467b709b7788cbe648b545cb04882f8"} Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.182904 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.185044 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367"} Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.185242 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.186433 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-q8pn7" event={"ID":"ffd91a64-4156-418d-8348-1efa3563e904","Type":"ContainerStarted","Data":"c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691"} Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.186472 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-q8pn7" event={"ID":"ffd91a64-4156-418d-8348-1efa3563e904","Type":"ContainerStarted","Data":"42998faf6374bbdaf911324411296ad2c80342e4339589329b0c699179e51945"} Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.188067 4840 generic.go:334] "Generic (PLEG): container finished" podID="78a40d7a-9ba7-4a35-8263-6faf0ca9d52e" containerID="4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b" exitCode=0 Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.188158 4840 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" event={"ID":"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e","Type":"ContainerDied","Data":"4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b"} Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.188209 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" event={"ID":"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e","Type":"ContainerStarted","Data":"f63547b0f7b8fc45c4acc87fcd246d4aaaf7a8ea6e774218e852f2beb9090688"} Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.190317 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"ddf79bcfabe8af9bbe2fa6232dc1c4589902af659a8c39466f12f28269ff7bc1"} Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.193259 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a"} Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.193332 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b"} Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.193466 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"d45ee3c01934bc46b0c326af0aa2b65ef8b68fac85b86a2eae1317a504d5950a"} Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.199754 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c"} Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.199856 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"5e526d411742739ee5a7fb64b9ba82744145f59b35c956a4ef7db51f2c60bca2"} Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.201466 4840 generic.go:334] "Generic (PLEG): container finished" podID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerID="f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee" exitCode=0 Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.201546 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerDied","Data":"f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee"} Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.201576 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerStarted","Data":"35d579b74fcbf6e4655624c42ca06f576717bffd48f5e68405c791110e95dc85"} Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.203245 
4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerStarted","Data":"bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed"} Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.203335 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerStarted","Data":"e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705"} Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.203398 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerStarted","Data":"d97f5f61794b4f0820ef62c2536a8ea7c24ef4d5a533364faf0ee71d8af1cf41"} Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.210014 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.223597 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.262832 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.303161 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.352038 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.385604 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05
T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.404195 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.422808 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.443214 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.463321 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\
"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.481270 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.502733 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.534586 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.561966 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.578029 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.599645 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.616527 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.638166 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc 
kubenswrapper[4840]: I1205 14:59:03.650287 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\
":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.664609 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.680462 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.685702 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-hnqdw"] Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.686120 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-hnqdw" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.695322 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.695345 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.695518 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.695623 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.699504 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.718849 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.731220 4840 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/eefb1b71-2f32-4847-b8eb-27dcce2a320d-serviceca\") pod \"node-ca-hnqdw\" (UID: \"eefb1b71-2f32-4847-b8eb-27dcce2a320d\") " pod="openshift-image-registry/node-ca-hnqdw" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.731271 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/eefb1b71-2f32-4847-b8eb-27dcce2a320d-host\") pod \"node-ca-hnqdw\" (UID: \"eefb1b71-2f32-4847-b8eb-27dcce2a320d\") " pod="openshift-image-registry/node-ca-hnqdw" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.731223 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]
}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.731291 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdnc4\" (UniqueName: \"kubernetes.io/projected/eefb1b71-2f32-4847-b8eb-27dcce2a320d-kube-api-access-cdnc4\") pod \"node-ca-hnqdw\" (UID: \"eefb1b71-2f32-4847-b8eb-27dcce2a320d\") " pod="openshift-image-registry/node-ca-hnqdw" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.755230 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z 
is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.778323 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.800650 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.831100 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.832023 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdnc4\" (UniqueName: \"kubernetes.io/projected/eefb1b71-2f32-4847-b8eb-27dcce2a320d-kube-api-access-cdnc4\") pod 
\"node-ca-hnqdw\" (UID: \"eefb1b71-2f32-4847-b8eb-27dcce2a320d\") " pod="openshift-image-registry/node-ca-hnqdw" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.832090 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.832149 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/eefb1b71-2f32-4847-b8eb-27dcce2a320d-serviceca\") pod \"node-ca-hnqdw\" (UID: \"eefb1b71-2f32-4847-b8eb-27dcce2a320d\") " pod="openshift-image-registry/node-ca-hnqdw" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.832227 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.832267 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.832300 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/eefb1b71-2f32-4847-b8eb-27dcce2a320d-host\") pod \"node-ca-hnqdw\" (UID: \"eefb1b71-2f32-4847-b8eb-27dcce2a320d\") " pod="openshift-image-registry/node-ca-hnqdw" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.832381 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/eefb1b71-2f32-4847-b8eb-27dcce2a320d-host\") pod \"node-ca-hnqdw\" (UID: \"eefb1b71-2f32-4847-b8eb-27dcce2a320d\") " pod="openshift-image-registry/node-ca-hnqdw" Dec 05 14:59:03 crc kubenswrapper[4840]: E1205 14:59:03.832427 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 14:59:03 crc kubenswrapper[4840]: E1205 14:59:03.832473 4840 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 14:59:03 crc kubenswrapper[4840]: E1205 14:59:03.832478 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 14:59:03 crc kubenswrapper[4840]: E1205 14:59:03.832501 4840 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 14:59:03 crc kubenswrapper[4840]: E1205 14:59:03.832536 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 14:59:05.832516665 +0000 UTC m=+24.173579299 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 14:59:03 crc kubenswrapper[4840]: E1205 14:59:03.832578 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 14:59:05.832559727 +0000 UTC m=+24.173622401 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 14:59:03 crc kubenswrapper[4840]: E1205 14:59:03.832660 4840 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 14:59:03 crc kubenswrapper[4840]: E1205 14:59:03.832688 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 14:59:05.83268131 +0000 UTC m=+24.173744014 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.833366 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/eefb1b71-2f32-4847-b8eb-27dcce2a320d-serviceca\") pod \"node-ca-hnqdw\" (UID: \"eefb1b71-2f32-4847-b8eb-27dcce2a320d\") " pod="openshift-image-registry/node-ca-hnqdw" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.849149 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.862916 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdnc4\" (UniqueName: \"kubernetes.io/projected/eefb1b71-2f32-4847-b8eb-27dcce2a320d-kube-api-access-cdnc4\") pod \"node-ca-hnqdw\" (UID: \"eefb1b71-2f32-4847-b8eb-27dcce2a320d\") " pod="openshift-image-registry/node-ca-hnqdw" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.869956 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.884676 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc 
kubenswrapper[4840]: I1205 14:59:03.894570 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.910717 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.924788 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.933175 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 14:59:03 crc kubenswrapper[4840]: E1205 14:59:03.933366 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 14:59:05.933335974 +0000 UTC m=+24.274398588 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.933577 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:03 crc kubenswrapper[4840]: E1205 14:59:03.933754 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 14:59:03 crc kubenswrapper[4840]: E1205 14:59:03.933779 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 14:59:03 crc kubenswrapper[4840]: E1205 14:59:03.933792 4840 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 14:59:03 crc kubenswrapper[4840]: E1205 14:59:03.933835 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 14:59:05.933827088 +0000 UTC m=+24.274889792 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.938601 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.949528 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.961807 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.977813 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:03 crc kubenswrapper[4840]: I1205 14:59:03.993168 
4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{
\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-d
ev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd
47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:03Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.004170 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\
\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:04Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.005268 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-hnqdw" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.066632 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.066684 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:04 crc kubenswrapper[4840]: E1205 14:59:04.067067 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.066701 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:04 crc kubenswrapper[4840]: E1205 14:59:04.067306 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 14:59:04 crc kubenswrapper[4840]: E1205 14:59:04.067481 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.070393 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.071043 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.072109 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.072717 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.073664 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.074173 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.074734 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.075648 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.076322 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.077197 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.077690 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.078699 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.079227 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.079697 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.080691 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.081198 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.082126 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.082583 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.083129 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.084070 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.084533 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.085416 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.085824 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.086775 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.087195 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.087831 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" 
path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.089278 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.089779 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.091051 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.091533 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.092377 4840 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.092477 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.094074 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.094979 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.095433 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.097440 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.098196 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.099379 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.100124 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.101386 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" 
path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.101965 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.102908 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.103559 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.105247 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.105823 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.106903 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.107549 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.109069 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.109574 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.111625 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.112083 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.113003 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.113542 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.114034 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" 
path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.241370 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" event={"ID":"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e","Type":"ContainerStarted","Data":"65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f"} Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.257117 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:04Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.259389 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerStarted","Data":"4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5"} Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.259469 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerStarted","Data":"349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48"} Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.259483 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerStarted","Data":"ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2"} Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.259495 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerStarted","Data":"4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c"} Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.272597 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-hnqdw" event={"ID":"eefb1b71-2f32-4847-b8eb-27dcce2a320d","Type":"ContainerStarted","Data":"30767c276aa9e88bcd9f49fc084171b392189c068c626d44ba74bebb1a5e9ddc"} Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.278180 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin 
routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\
\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mo
untPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:04Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.298265 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:04Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.312928 
4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:04Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.331305 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:04Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.344678 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:04Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.366988 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:04Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.389389 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:04Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.411514 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:04Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.427754 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T14:59:04Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.450674 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:04Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.468425 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\
":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:04Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.505152 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:04Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.532741 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:04Z 
is after 2025-08-24T17:21:41Z"
Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.853775    4840 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.855999    4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.856038    4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.856048    4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.856164    4840 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.863081    4840 kubelet_node_status.go:115] "Node was previously registered" node="crc"
Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.863393    4840 kubelet_node_status.go:79] "Successfully registered node" node="crc"
Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.864619    4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.864662    4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.864672    4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.864689    4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.864709    4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:04Z","lastTransitionTime":"2025-12-05T14:59:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 05 14:59:04 crc kubenswrapper[4840]: E1205 14:59:04.884391 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:04Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.888361 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.888416 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.888426 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.888442 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.888458 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:04Z","lastTransitionTime":"2025-12-05T14:59:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:04 crc kubenswrapper[4840]: E1205 14:59:04.901440 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:04Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.904981 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.905007 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.905016 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.905030 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.905039 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:04Z","lastTransitionTime":"2025-12-05T14:59:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:04 crc kubenswrapper[4840]: E1205 14:59:04.916263 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:04Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.919676 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.919702 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.919710 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.919724 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.919734 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:04Z","lastTransitionTime":"2025-12-05T14:59:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:04 crc kubenswrapper[4840]: E1205 14:59:04.930484 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:04Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.935720 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.935768 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.935781 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.935811 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.935823 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:04Z","lastTransitionTime":"2025-12-05T14:59:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:04 crc kubenswrapper[4840]: E1205 14:59:04.948742 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:04Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:04 crc kubenswrapper[4840]: E1205 14:59:04.948971 4840 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.950849 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.950890 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.950900 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.950915 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:04 crc kubenswrapper[4840]: I1205 14:59:04.950926 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:04Z","lastTransitionTime":"2025-12-05T14:59:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.053154 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.053208 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.053222 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.053241 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.053255 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:05Z","lastTransitionTime":"2025-12-05T14:59:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.156259 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.156303 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.156316 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.156332 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.156343 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:05Z","lastTransitionTime":"2025-12-05T14:59:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.259132 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.259187 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.259196 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.259214 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.259225 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:05Z","lastTransitionTime":"2025-12-05T14:59:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.276148 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b"} Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.278611 4840 generic.go:334] "Generic (PLEG): container finished" podID="78a40d7a-9ba7-4a35-8263-6faf0ca9d52e" containerID="65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f" exitCode=0 Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.278677 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" event={"ID":"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e","Type":"ContainerDied","Data":"65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f"} Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.285799 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerStarted","Data":"6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585"} Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.285881 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerStarted","Data":"152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5"} Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.287423 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-hnqdw" event={"ID":"eefb1b71-2f32-4847-b8eb-27dcce2a320d","Type":"ContainerStarted","Data":"8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b"} Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.292583 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.309630 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.325596 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.336907 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.360169 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.361527 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.361557 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.361566 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.361579 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.361587 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:05Z","lastTransitionTime":"2025-12-05T14:59:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.374849 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.387796 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.399661 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.414765 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.424779 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.435879 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.448376 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.464188 
4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.464234 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.464246 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.464266 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.464282 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:05Z","lastTransitionTime":"2025-12-05T14:59:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.467570 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z 
is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.481830 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.492687 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.505763 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.517010 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.528793 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.547694 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z 
is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.559538 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.567738 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.567774 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.567784 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.567798 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.567808 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:05Z","lastTransitionTime":"2025-12-05T14:59:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.574754 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.585273 4840 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.596833 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.608150 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.621955 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.634165 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.648678 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.666419 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:05Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.670743 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.670824 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.670838 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.670887 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.670904 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:05Z","lastTransitionTime":"2025-12-05T14:59:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.774043 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.774092 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.774103 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.774122 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.774136 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:05Z","lastTransitionTime":"2025-12-05T14:59:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.854723 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.854810 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.854849 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:05 crc kubenswrapper[4840]: E1205 14:59:05.854974 4840 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 14:59:05 crc kubenswrapper[4840]: E1205 14:59:05.855040 4840 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 14:59:05 crc kubenswrapper[4840]: E1205 14:59:05.855082 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 14:59:05 crc kubenswrapper[4840]: E1205 14:59:05.855056 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-12-05 14:59:09.85503765 +0000 UTC m=+28.196100264 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 14:59:05 crc kubenswrapper[4840]: E1205 14:59:05.855134 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 14:59:05 crc kubenswrapper[4840]: E1205 14:59:05.855154 4840 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 14:59:05 crc kubenswrapper[4840]: E1205 14:59:05.855162 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 14:59:09.855137413 +0000 UTC m=+28.196200047 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 14:59:05 crc kubenswrapper[4840]: E1205 14:59:05.855220 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 14:59:09.855197824 +0000 UTC m=+28.196260628 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.876812 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.877329 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.877345 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.877365 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.877380 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:05Z","lastTransitionTime":"2025-12-05T14:59:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.955194 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.955263 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:05 crc kubenswrapper[4840]: E1205 14:59:05.955400 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 14:59:05 crc kubenswrapper[4840]: E1205 14:59:05.955416 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 14:59:05 crc kubenswrapper[4840]: E1205 14:59:05.955427 4840 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 14:59:05 crc kubenswrapper[4840]: E1205 14:59:05.955459 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-05 14:59:09.955427118 +0000 UTC m=+28.296489742 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 14:59:05 crc kubenswrapper[4840]: E1205 14:59:05.955497 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 14:59:09.95548727 +0000 UTC m=+28.296549894 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.979784 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.979827 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.979837 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.979854 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:05 crc kubenswrapper[4840]: I1205 14:59:05.979886 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:05Z","lastTransitionTime":"2025-12-05T14:59:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.066060 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.066107 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.066158 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:06 crc kubenswrapper[4840]: E1205 14:59:06.066248 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 14:59:06 crc kubenswrapper[4840]: E1205 14:59:06.066362 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 14:59:06 crc kubenswrapper[4840]: E1205 14:59:06.066456 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.082558 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.082885 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.082971 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.083038 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.083097 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:06Z","lastTransitionTime":"2025-12-05T14:59:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.186110 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.186320 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.186384 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.186442 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.186494 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:06Z","lastTransitionTime":"2025-12-05T14:59:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.289569 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.289625 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.289642 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.289668 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.289685 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:06Z","lastTransitionTime":"2025-12-05T14:59:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.292255 4840 generic.go:334] "Generic (PLEG): container finished" podID="78a40d7a-9ba7-4a35-8263-6faf0ca9d52e" containerID="c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0" exitCode=0 Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.292346 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" event={"ID":"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e","Type":"ContainerDied","Data":"c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0"} Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.306950 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:06Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.319943 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T14:59:06Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.332803 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":tr
ue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:06Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.349351 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:06Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.365735 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:06Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.383811 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:06Z 
is after 2025-08-24T17:21:41Z" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.391782 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.391818 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.391830 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.391847 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.391857 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:06Z","lastTransitionTime":"2025-12-05T14:59:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.398272 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-
cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:06Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.408445 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:06Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.424068 4840 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:06Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.437038 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:06Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.455402 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:06Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.468470 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:06Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.478828 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:06Z is after 
2025-08-24T17:21:41Z" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.489936 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:06Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.494483 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.494518 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.494527 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.494542 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.494557 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:06Z","lastTransitionTime":"2025-12-05T14:59:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.597177 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.597203 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.597212 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.597226 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.597235 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:06Z","lastTransitionTime":"2025-12-05T14:59:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.699100 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.699143 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.699152 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.699168 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.699179 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:06Z","lastTransitionTime":"2025-12-05T14:59:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.803533 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.803584 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.803596 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.803616 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.803628 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:06Z","lastTransitionTime":"2025-12-05T14:59:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.905573 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.905602 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.905610 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.905623 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:06 crc kubenswrapper[4840]: I1205 14:59:06.905635 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:06Z","lastTransitionTime":"2025-12-05T14:59:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.009293 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.009349 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.009372 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.009403 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.009428 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:07Z","lastTransitionTime":"2025-12-05T14:59:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.111361 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.111394 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.111403 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.111417 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.111427 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:07Z","lastTransitionTime":"2025-12-05T14:59:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.213621 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.213664 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.213675 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.213694 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.213708 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:07Z","lastTransitionTime":"2025-12-05T14:59:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.299659 4840 generic.go:334] "Generic (PLEG): container finished" podID="78a40d7a-9ba7-4a35-8263-6faf0ca9d52e" containerID="d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70" exitCode=0 Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.299733 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" event={"ID":"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e","Type":"ContainerDied","Data":"d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70"} Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.305090 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerStarted","Data":"4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6"} Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.316704 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.316752 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.316770 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.316794 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.316812 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:07Z","lastTransitionTime":"2025-12-05T14:59:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.319986 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.353342 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.370954 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.388053 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":tr
ue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.401422 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.417479 4840 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.419079 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.419116 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:07 
crc kubenswrapper[4840]: I1205 14:59:07.419126 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.419141 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.419151 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:07Z","lastTransitionTime":"2025-12-05T14:59:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.446261 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:07Z 
is after 2025-08-24T17:21:41Z" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.457587 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.469439 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.483138 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.493823 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.507961 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.523050 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.523096 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.523107 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.523126 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.523137 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:07Z","lastTransitionTime":"2025-12-05T14:59:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.523484 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:
59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.534326 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:07Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.626812 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.626877 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.626888 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.626905 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.626916 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:07Z","lastTransitionTime":"2025-12-05T14:59:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.729827 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.729910 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.729922 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.729940 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.729953 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:07Z","lastTransitionTime":"2025-12-05T14:59:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.832478 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.832517 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.832529 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.832553 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.832564 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:07Z","lastTransitionTime":"2025-12-05T14:59:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.935576 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.935647 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.935661 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.935686 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:07 crc kubenswrapper[4840]: I1205 14:59:07.935702 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:07Z","lastTransitionTime":"2025-12-05T14:59:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.038805 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.038895 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.038915 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.038938 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.038955 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:08Z","lastTransitionTime":"2025-12-05T14:59:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.066212 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.066273 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:08 crc kubenswrapper[4840]: E1205 14:59:08.066354 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.066421 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:08 crc kubenswrapper[4840]: E1205 14:59:08.066453 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 14:59:08 crc kubenswrapper[4840]: E1205 14:59:08.066744 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.141828 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.141929 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.141944 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.141965 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.141979 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:08Z","lastTransitionTime":"2025-12-05T14:59:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.244342 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.244375 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.244384 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.244397 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.244407 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:08Z","lastTransitionTime":"2025-12-05T14:59:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.311898 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" event={"ID":"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e","Type":"ContainerStarted","Data":"c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043"} Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.325901 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.342506 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.346916 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.346982 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.347007 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.347038 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.347054 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:08Z","lastTransitionTime":"2025-12-05T14:59:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.358334 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.379679 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.396711 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.417568 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.438485 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.448816 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.448876 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.448890 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.448907 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.448918 4840 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:08Z","lastTransitionTime":"2025-12-05T14:59:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.453188 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.468207 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.481165 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.491017 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.503530 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.551037 
4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.551080 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.551091 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.551108 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.551119 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:08Z","lastTransitionTime":"2025-12-05T14:59:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.575899 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:08Z 
is after 2025-08-24T17:21:41Z" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.613293 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:08Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.652708 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.652748 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.652758 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.652775 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.652787 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:08Z","lastTransitionTime":"2025-12-05T14:59:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.755106 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.755147 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.755156 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.755170 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.755179 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:08Z","lastTransitionTime":"2025-12-05T14:59:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.857412 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.857453 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.857465 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.857480 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.857490 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:08Z","lastTransitionTime":"2025-12-05T14:59:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.959819 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.959884 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.959896 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.959914 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:08 crc kubenswrapper[4840]: I1205 14:59:08.959925 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:08Z","lastTransitionTime":"2025-12-05T14:59:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.063173 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.063217 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.063227 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.063242 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.063253 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:09Z","lastTransitionTime":"2025-12-05T14:59:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.166719 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.167102 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.167113 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.167167 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.167191 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:09Z","lastTransitionTime":"2025-12-05T14:59:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.270595 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.270669 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.270691 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.270716 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.270732 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:09Z","lastTransitionTime":"2025-12-05T14:59:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.326161 4840 generic.go:334] "Generic (PLEG): container finished" podID="78a40d7a-9ba7-4a35-8263-6faf0ca9d52e" containerID="c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043" exitCode=0 Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.326205 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" event={"ID":"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e","Type":"ContainerDied","Data":"c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043"} Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.344684 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck
74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.362089 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2k
z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.373364 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.373401 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.373411 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.373429 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.373442 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:09Z","lastTransitionTime":"2025-12-05T14:59:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.384756 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:09Z 
is after 2025-08-24T17:21:41Z" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.402466 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.418171 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.432910 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.448182 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.462951 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.475753 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.475785 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.475796 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.475809 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.475818 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:09Z","lastTransitionTime":"2025-12-05T14:59:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.480598 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.496943 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.510405 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.534279 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.550563 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:09Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.567766 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T14:59:09Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.579110 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.579168 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.579181 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.579210 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.579225 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:09Z","lastTransitionTime":"2025-12-05T14:59:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.681269 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.681324 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.681336 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.681352 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.681363 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:09Z","lastTransitionTime":"2025-12-05T14:59:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.785024 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.785071 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.785083 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.785104 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.785116 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:09Z","lastTransitionTime":"2025-12-05T14:59:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.888206 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.888257 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.888272 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.888293 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.888309 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:09Z","lastTransitionTime":"2025-12-05T14:59:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.919392 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.919476 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.919518 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 14:59:09 crc kubenswrapper[4840]: E1205 14:59:09.919540 4840 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 05 14:59:09 crc kubenswrapper[4840]: E1205 14:59:09.919665 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 14:59:17.919641613 +0000 UTC m=+36.260704397 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 05 14:59:09 crc kubenswrapper[4840]: E1205 14:59:09.919722 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 05 14:59:09 crc kubenswrapper[4840]: E1205 14:59:09.919750 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 05 14:59:09 crc kubenswrapper[4840]: E1205 14:59:09.919767 4840 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 14:59:09 crc kubenswrapper[4840]: E1205 14:59:09.919720 4840 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 05 14:59:09 crc kubenswrapper[4840]: E1205 14:59:09.919836 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 14:59:17.919813768 +0000 UTC m=+36.260876562 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 14:59:09 crc kubenswrapper[4840]: E1205 14:59:09.919878 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 14:59:17.919851769 +0000 UTC m=+36.260914563 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.990692 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.990736 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.990754 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.990774 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:09 crc kubenswrapper[4840]: I1205 14:59:09.990789 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:09Z","lastTransitionTime":"2025-12-05T14:59:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.021000 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 14:59:10 crc kubenswrapper[4840]: E1205 14:59:10.021206 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 14:59:18.021173033 +0000 UTC m=+36.362235647 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.021308 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 14:59:10 crc kubenswrapper[4840]: E1205 14:59:10.021468 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 05 14:59:10 crc kubenswrapper[4840]: E1205 14:59:10.021483 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 05 14:59:10 crc kubenswrapper[4840]: E1205 14:59:10.021494 4840 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 14:59:10 crc kubenswrapper[4840]: E1205 14:59:10.021530 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 14:59:18.021520002 +0000 UTC m=+36.362582816 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.066197 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.066267 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 14:59:10 crc kubenswrapper[4840]: E1205 14:59:10.066331 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.066343 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 14:59:10 crc kubenswrapper[4840]: E1205 14:59:10.066429 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 14:59:10 crc kubenswrapper[4840]: E1205 14:59:10.066583 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.093713 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.093741 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.093751 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.093764 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.093773 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:10Z","lastTransitionTime":"2025-12-05T14:59:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.195941 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.195974 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.195986 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.196003 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.196014 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:10Z","lastTransitionTime":"2025-12-05T14:59:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.298420 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.298467 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.298479 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.298496 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.298509 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:10Z","lastTransitionTime":"2025-12-05T14:59:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.332710 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerStarted","Data":"171f7bda3b2b85e2918b75df795cc29c8433c9ec9424935203d35fce02b7b026"}
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.333800 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.333896 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.337542 4840 generic.go:334] "Generic (PLEG): container finished" podID="78a40d7a-9ba7-4a35-8263-6faf0ca9d52e" containerID="4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174" exitCode=0
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.337583 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" event={"ID":"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e","Type":"ContainerDied","Data":"4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174"}
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.347299 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.358823 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.361209 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.370858 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.387849 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.400284 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.403138 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.403186 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.403200 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.403222 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.403235 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:10Z","lastTransitionTime":"2025-12-05T14:59:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.413228 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.428788 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.446641 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.463939 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.477383 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.489252 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.501439 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.510291 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.510360 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.510372 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.510391 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.510402 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:10Z","lastTransitionTime":"2025-12-05T14:59:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.520022 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1
74f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.543258 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://171f7bda3b2b85e2918b75df795cc29c8433c9ec
9424935203d35fce02b7b026\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccou
nt\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.559363 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.576273 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.590144 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.609188 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.612971 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.613004 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.613013 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.613027 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.613035 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:10Z","lastTransitionTime":"2025-12-05T14:59:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.619135 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.630100 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.643942 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.656854 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.673446 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.688204 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.706029 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.715662 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.715710 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.715719 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.715733 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.715743 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:10Z","lastTransitionTime":"2025-12-05T14:59:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.729471 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://171f7bda3b2b85e2918b75df795cc29c8433c9ec9424935203d35fce02b7b026\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.744577 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.757823 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.771081 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:10Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.821119 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.821164 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.821175 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.821192 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.821204 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:10Z","lastTransitionTime":"2025-12-05T14:59:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.924184 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.924221 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.924231 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.924244 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:10 crc kubenswrapper[4840]: I1205 14:59:10.924253 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:10Z","lastTransitionTime":"2025-12-05T14:59:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.026938 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.026978 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.026988 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.027002 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.027012 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:11Z","lastTransitionTime":"2025-12-05T14:59:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.129542 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.129568 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.129576 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.129588 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.129598 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:11Z","lastTransitionTime":"2025-12-05T14:59:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.232521 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.232583 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.232599 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.233356 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.233400 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:11Z","lastTransitionTime":"2025-12-05T14:59:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.335677 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.335706 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.335717 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.335731 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.335742 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:11Z","lastTransitionTime":"2025-12-05T14:59:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.360145 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" event={"ID":"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e","Type":"ContainerStarted","Data":"74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927"}
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.360806 4840 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.372343 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:11Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.429016 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:11Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.437594 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.437631 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.437640 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.437654 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.437663 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:11Z","lastTransitionTime":"2025-12-05T14:59:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.445555 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:11Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.459248 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:11Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.472812 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:11Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.486447 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:11Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.499443 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:11Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.509956 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:11Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.520317 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:11Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.528797 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T14:59:11Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.539656 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.539699 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.539712 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.539728 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.539739 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:11Z","lastTransitionTime":"2025-12-05T14:59:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.542628 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:11Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.559601 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d
773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:11Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.575962 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://171f7bda3b2b85e2918b75df795cc29c8433c9ec
9424935203d35fce02b7b026\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:11Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.588575 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:11Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.641672 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.641701 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.641710 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.641722 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.641730 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:11Z","lastTransitionTime":"2025-12-05T14:59:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.744002 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.744043 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.744054 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.744071 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.744083 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:11Z","lastTransitionTime":"2025-12-05T14:59:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.846703 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.846783 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.846804 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.846829 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.846846 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:11Z","lastTransitionTime":"2025-12-05T14:59:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.949628 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.949697 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.949721 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.949748 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:11 crc kubenswrapper[4840]: I1205 14:59:11.949766 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:11Z","lastTransitionTime":"2025-12-05T14:59:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.052817 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.052877 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.052889 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.052905 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.052916 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:12Z","lastTransitionTime":"2025-12-05T14:59:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.066531 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.066566 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.066569 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 14:59:12 crc kubenswrapper[4840]: E1205 14:59:12.066658 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 14:59:12 crc kubenswrapper[4840]: E1205 14:59:12.066901 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 14:59:12 crc kubenswrapper[4840]: E1205 14:59:12.066961 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.082096 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\"
:{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.093448 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.112122 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.130677 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.147424 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.157635 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.157662 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.157673 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.157688 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.157699 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:12Z","lastTransitionTime":"2025-12-05T14:59:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.164971 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1
74f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.181516 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://171f7bda3b2b85e2918b75df795cc29c8433c9ec
9424935203d35fce02b7b026\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.192725 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.201578 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.216270 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.232664 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.248728 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.259381 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.259424 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.259436 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.259461 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.259473 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:12Z","lastTransitionTime":"2025-12-05T14:59:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.260754 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.278062 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.361818 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.361900 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:12 crc 
kubenswrapper[4840]: I1205 14:59:12.361910 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.361937 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.361948 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:12Z","lastTransitionTime":"2025-12-05T14:59:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.367435 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-czvxk_e9a432c2-725d-46c6-963e-68a99ba35c89/ovnkube-controller/0.log" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.370496 4840 generic.go:334] "Generic (PLEG): container finished" podID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerID="171f7bda3b2b85e2918b75df795cc29c8433c9ec9424935203d35fce02b7b026" exitCode=1 Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.370537 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerDied","Data":"171f7bda3b2b85e2918b75df795cc29c8433c9ec9424935203d35fce02b7b026"} Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.371227 4840 scope.go:117] "RemoveContainer" containerID="171f7bda3b2b85e2918b75df795cc29c8433c9ec9424935203d35fce02b7b026" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.383749 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.395390 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.413665 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.425784 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.436782 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.450109 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.464567 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.464906 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.464938 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.464974 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.464991 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.465004 4840 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:12Z","lastTransitionTime":"2025-12-05T14:59:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.480497 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.494671 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.505750 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.517939 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.531563 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.554689 
4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154
edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://171f7bda3b2b85e2918b75df795cc29c8433c9ec9424935203d35fce02b7b026\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://171f7bda3b2b85e2918b75df795cc29c8433c9ec9424935203d35fce02b7b026\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:12Z\\\",\\\"message\\\":\\\"1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 14:59:12.272998 6138 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 14:59:12.273032 6138 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 14:59:12.273069 6138 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 14:59:12.273120 6138 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 14:59:12.273189 6138 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 14:59:12.273198 6138 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 14:59:12.273208 6138 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 14:59:12.273212 6138 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 14:59:12.273224 6138 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 14:59:12.273238 6138 factory.go:656] Stopping watch factory\\\\nI1205 14:59:12.273250 6138 ovnkube.go:599] Stopped ovnkube\\\\nI1205 14:59:12.273273 6138 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 14:59:12.273283 6138 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 14:59:12.273289 6138 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 14:59:12.273295 6138 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 
14:59:1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0
d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.567457 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.567489 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.567497 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.567510 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.567519 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:12Z","lastTransitionTime":"2025-12-05T14:59:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.569102 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:12Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.670119 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.670169 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.670180 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.670197 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.670212 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:12Z","lastTransitionTime":"2025-12-05T14:59:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.772447 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.772492 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.772503 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.772519 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.772531 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:12Z","lastTransitionTime":"2025-12-05T14:59:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.875305 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.875353 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.875366 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.875385 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.875395 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:12Z","lastTransitionTime":"2025-12-05T14:59:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.977505 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.977556 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.977577 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.977597 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:12 crc kubenswrapper[4840]: I1205 14:59:12.977611 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:12Z","lastTransitionTime":"2025-12-05T14:59:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.079588 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.079631 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.079646 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.079663 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.079674 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:13Z","lastTransitionTime":"2025-12-05T14:59:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.181663 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.181697 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.181706 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.181721 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.181730 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:13Z","lastTransitionTime":"2025-12-05T14:59:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.283783 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.283839 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.283848 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.283879 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.283894 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:13Z","lastTransitionTime":"2025-12-05T14:59:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.375138 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-czvxk_e9a432c2-725d-46c6-963e-68a99ba35c89/ovnkube-controller/0.log" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.378169 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerStarted","Data":"9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc"} Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.378289 4840 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.386193 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.386239 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.386251 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.386268 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.386282 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:13Z","lastTransitionTime":"2025-12-05T14:59:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.391841 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:13Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.406913 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:13Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.418523 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:13Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.429787 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:13Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.441433 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:13Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.455303 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:13Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.468013 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:13Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.479291 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:13Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.488343 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.488385 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.488394 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.488410 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.488420 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:13Z","lastTransitionTime":"2025-12-05T14:59:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.492088 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:13Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.504516 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:13Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.516793 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:13Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.528799 4840 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:13Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.547968 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://171f7bda3b2b85e2918b75df795cc29c8433c9ec9424935203d35fce02b7b026\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:12Z\\\",\\\"message\\\":\\\"1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 14:59:12.272998 6138 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 14:59:12.273032 6138 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 14:59:12.273069 6138 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 14:59:12.273120 6138 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 14:59:12.273189 6138 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 14:59:12.273198 6138 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 14:59:12.273208 6138 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 14:59:12.273212 6138 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 14:59:12.273224 6138 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 14:59:12.273238 6138 factory.go:656] Stopping watch factory\\\\nI1205 14:59:12.273250 6138 ovnkube.go:599] Stopped ovnkube\\\\nI1205 14:59:12.273273 6138 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 14:59:12.273283 6138 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 14:59:12.273289 6138 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 14:59:12.273295 6138 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 
14:59:1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\
\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:13Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.563045 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:13Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.590592 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.590619 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.590627 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.590640 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.590648 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:13Z","lastTransitionTime":"2025-12-05T14:59:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.693496 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.693548 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.693557 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.693571 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.693581 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:13Z","lastTransitionTime":"2025-12-05T14:59:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.796072 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.796118 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.796128 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.796145 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.796158 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:13Z","lastTransitionTime":"2025-12-05T14:59:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.898615 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.898654 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.898665 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.898681 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:13 crc kubenswrapper[4840]: I1205 14:59:13.898693 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:13Z","lastTransitionTime":"2025-12-05T14:59:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.001465 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.001510 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.001518 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.001533 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.001542 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:14Z","lastTransitionTime":"2025-12-05T14:59:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.066467 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.066518 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:14 crc kubenswrapper[4840]: E1205 14:59:14.066602 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.066470 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:14 crc kubenswrapper[4840]: E1205 14:59:14.066713 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 14:59:14 crc kubenswrapper[4840]: E1205 14:59:14.066901 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.103558 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.103639 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.103654 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.103702 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.103719 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:14Z","lastTransitionTime":"2025-12-05T14:59:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.206804 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.206894 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.206912 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.206936 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.206953 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:14Z","lastTransitionTime":"2025-12-05T14:59:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.309707 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.309778 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.309793 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.309818 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.309833 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:14Z","lastTransitionTime":"2025-12-05T14:59:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.381041 4840 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.412223 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.412275 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.412287 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.412307 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.412322 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:14Z","lastTransitionTime":"2025-12-05T14:59:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.514659 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.514701 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.514709 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.514723 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.514732 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:14Z","lastTransitionTime":"2025-12-05T14:59:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.616718 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.616758 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.616768 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.616787 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.616803 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:14Z","lastTransitionTime":"2025-12-05T14:59:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.718736 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.718783 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.718796 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.718813 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.718824 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:14Z","lastTransitionTime":"2025-12-05T14:59:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.821469 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.821556 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.821575 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.821596 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.821612 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:14Z","lastTransitionTime":"2025-12-05T14:59:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.875224 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5"] Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.876281 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.880857 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.881295 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.908968 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d5622959563
51ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"
kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:14Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.925729 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.925797 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.925812 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.925798 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:14Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.925832 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.925950 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:14Z","lastTransitionTime":"2025-12-05T14:59:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.956395 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tszn5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:14Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.975240 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:14Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:14 crc kubenswrapper[4840]: I1205 14:59:14.988102 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:14Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.000417 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:14Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.012654 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.028803 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.028851 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.028862 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.028907 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.028921 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:15Z","lastTransitionTime":"2025-12-05T14:59:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.031767 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.046997 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.059352 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.069382 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.075949 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a03adad7-7e03-4bc8-9a48-98dff0e91cc9-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-tszn5\" (UID: \"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.075983 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a03adad7-7e03-4bc8-9a48-98dff0e91cc9-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-tszn5\" (UID: \"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.076001 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddcrb\" (UniqueName: \"kubernetes.io/projected/a03adad7-7e03-4bc8-9a48-98dff0e91cc9-kube-api-access-ddcrb\") pod \"ovnkube-control-plane-749d76644c-tszn5\" (UID: \"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.076040 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a03adad7-7e03-4bc8-9a48-98dff0e91cc9-env-overrides\") pod \"ovnkube-control-plane-749d76644c-tszn5\" (UID: \"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.079726 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.092054 4840 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.111699 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://171f7bda3b2b85e2918b75df795cc29c8433c9ec9424935203d35fce02b7b026\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:12Z\\\",\\\"message\\\":\\\"1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 14:59:12.272998 6138 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 14:59:12.273032 6138 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 14:59:12.273069 6138 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 14:59:12.273120 6138 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 14:59:12.273189 6138 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 14:59:12.273198 6138 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 14:59:12.273208 6138 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 14:59:12.273212 6138 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 14:59:12.273224 6138 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 14:59:12.273238 6138 factory.go:656] Stopping watch factory\\\\nI1205 14:59:12.273250 6138 ovnkube.go:599] Stopped ovnkube\\\\nI1205 14:59:12.273273 6138 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 14:59:12.273283 6138 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 14:59:12.273289 6138 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 14:59:12.273295 6138 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 
14:59:1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\
\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.126146 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.131908 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.131945 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.131956 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.131970 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.131979 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:15Z","lastTransitionTime":"2025-12-05T14:59:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.177107 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a03adad7-7e03-4bc8-9a48-98dff0e91cc9-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-tszn5\" (UID: \"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.177148 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a03adad7-7e03-4bc8-9a48-98dff0e91cc9-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-tszn5\" (UID: \"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.177164 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddcrb\" (UniqueName: \"kubernetes.io/projected/a03adad7-7e03-4bc8-9a48-98dff0e91cc9-kube-api-access-ddcrb\") pod \"ovnkube-control-plane-749d76644c-tszn5\" (UID: \"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.177201 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a03adad7-7e03-4bc8-9a48-98dff0e91cc9-env-overrides\") pod \"ovnkube-control-plane-749d76644c-tszn5\" (UID: \"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.177756 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a03adad7-7e03-4bc8-9a48-98dff0e91cc9-env-overrides\") pod \"ovnkube-control-plane-749d76644c-tszn5\" (UID: \"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.178316 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a03adad7-7e03-4bc8-9a48-98dff0e91cc9-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-tszn5\" (UID: \"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.188350 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a03adad7-7e03-4bc8-9a48-98dff0e91cc9-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-tszn5\" (UID: \"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.193399 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddcrb\" (UniqueName: \"kubernetes.io/projected/a03adad7-7e03-4bc8-9a48-98dff0e91cc9-kube-api-access-ddcrb\") pod \"ovnkube-control-plane-749d76644c-tszn5\" (UID: \"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.213948 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.213991 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.214003 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.214021 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.214033 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:15Z","lastTransitionTime":"2025-12-05T14:59:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:15 crc kubenswrapper[4840]: E1205 14:59:15.230723 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.235219 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.235267 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.235284 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.235303 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.235316 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:15Z","lastTransitionTime":"2025-12-05T14:59:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:15 crc kubenswrapper[4840]: E1205 14:59:15.250587 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.255144 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.255195 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
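Every rejected patch in this stretch ends with the same root cause: the node.network-node-identity.openshift.io webhook at 127.0.0.1:9743 serves a certificate that expired on 2025-08-24T17:21:41Z, while the node clock reads 2025-12-05. A minimal Go sketch of how that expiry could be confirmed against the endpoint named in the log; the file name and approach are illustrative and not part of kubelet or OVN-Kubernetes.

// certcheck.go - hypothetical helper, not from this log's components:
// dials the webhook endpoint the kubelet is failing against and prints
// the leaf certificate's validity window, reproducing the x509
// "certificate has expired" verdict recorded above.
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Skip verification so the handshake succeeds even with an expired
	// cert; we inspect NotBefore/NotAfter ourselves afterwards.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("NotBefore=%s NotAfter=%s expiredNow=%v\n",
		cert.NotBefore.Format(time.RFC3339),
		cert.NotAfter.Format(time.RFC3339),
		time.Now().After(cert.NotAfter))
}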
event="NodeHasNoDiskPressure" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.255209 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.255231 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.255246 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:15Z","lastTransitionTime":"2025-12-05T14:59:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:15 crc kubenswrapper[4840]: E1205 14:59:15.267035 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.270788 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.270827 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
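Alongside the webhook failures, every setters.go:603 entry re-states why the node is NotReady: no CNI configuration file exists in /etc/kubernetes/cni/net.d/, because the OVN-Kubernetes controller seen later in this log is crash-looping before it can write one. A short Go sketch of the directory probe that message implies; the file name and glob patterns are assumptions, not kubelet code.

// cnicheck.go - illustrative only: looks for CNI network configs in
// the directory named by the NetworkPluginNotReady message above.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confs, _ := filepath.Glob("/etc/kubernetes/cni/net.d/*.conf")
	lists, _ := filepath.Glob("/etc/kubernetes/cni/net.d/*.conflist")
	if len(confs)+len(lists) == 0 {
		// Mirrors the kubelet's complaint while the network plugin has
		// not yet written its configuration.
		fmt.Fprintln(os.Stderr, "no CNI configuration file in /etc/kubernetes/cni/net.d/")
		os.Exit(1)
	}
	fmt.Println("CNI configs found:", append(confs, lists...))
}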
event="NodeHasNoDiskPressure" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.270836 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.270848 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.270856 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:15Z","lastTransitionTime":"2025-12-05T14:59:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:15 crc kubenswrapper[4840]: E1205 14:59:15.282897 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.285986 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.286015 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
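The condition={...} objects in the setters.go:603 entries are ordinary JSON. A self-contained Go sketch that reproduces one of them; the local NodeCondition struct simply mirrors the log's JSON keys and stands in for the upstream corev1.NodeCondition type.

// condition.go - marshals a struct shaped like the Ready condition the
// kubelet keeps re-recording above; field names follow the log's JSON.
package main

import (
	"encoding/json"
	"fmt"
)

type NodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	c := NodeCondition{
		Type:               "Ready",
		Status:             "False",
		LastHeartbeatTime:  "2025-12-05T14:59:15Z",
		LastTransitionTime: "2025-12-05T14:59:15Z",
		Reason:             "KubeletNotReady",
		Message:            "container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?",
	}
	out, _ := json.Marshal(c)
	fmt.Println(string(out))
}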
event="NodeHasNoDiskPressure" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.286024 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.286038 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.286048 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:15Z","lastTransitionTime":"2025-12-05T14:59:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:15 crc kubenswrapper[4840]: E1205 14:59:15.297018 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: E1205 14:59:15.297132 4840 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.298448 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
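The 14:59:15.297132 entry shows the kubelet giving up after its fixed number of attempts: "update node status exceeds retry count". A simplified Go sketch of that loop; nodeStatusUpdateRetry = 5 matches the upstream kubelet constant, but the function bodies here are stand-ins, not kubelet source.

// retry.go - simplified model of the sequence above: several "will
// retry" errors, then "Unable to update node status" once the retry
// budget is spent.
package main

import (
	"errors"
	"fmt"
)

const nodeStatusUpdateRetry = 5 // upstream kubelet default

// tryUpdateNodeStatus stands in for the PATCH the admission webhook
// keeps rejecting with the expired-certificate error.
func tryUpdateNodeStatus() error {
	return errors.New("failed calling webhook: x509: certificate has expired or is not yet valid")
}

func updateNodeStatus() error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := tryUpdateNodeStatus(); err != nil {
			fmt.Println("Error updating node status, will retry:", err)
			continue
		}
		return nil
	}
	return errors.New("update node status exceeds retry count")
}

func main() {
	if err := updateNodeStatus(); err != nil {
		fmt.Println("Unable to update node status:", err)
	}
}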
event="NodeHasSufficientMemory" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.298474 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.298483 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.298527 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.298538 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:15Z","lastTransitionTime":"2025-12-05T14:59:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.386213 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-czvxk_e9a432c2-725d-46c6-963e-68a99ba35c89/ovnkube-controller/1.log" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.386794 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-czvxk_e9a432c2-725d-46c6-963e-68a99ba35c89/ovnkube-controller/0.log" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.389175 4840 generic.go:334] "Generic (PLEG): container finished" podID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerID="9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc" exitCode=1 Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.389229 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerDied","Data":"9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc"} Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.389294 4840 scope.go:117] "RemoveContainer" containerID="171f7bda3b2b85e2918b75df795cc29c8433c9ec9424935203d35fce02b7b026" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.390421 4840 scope.go:117] "RemoveContainer" containerID="9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc" Dec 05 14:59:15 crc kubenswrapper[4840]: E1205 14:59:15.390760 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-czvxk_openshift-ovn-kubernetes(e9a432c2-725d-46c6-963e-68a99ba35c89)\"" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.400475 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.400527 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.400544 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.400563 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.400575 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:15Z","lastTransitionTime":"2025-12-05T14:59:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.404841 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.426288 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10
f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://171f7bda3b2b85e2918b75df795cc29c8433c9ec9424935203d35fce02b7b026\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:12Z\\\",\\\"message\\\":\\\"1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 14:59:12.272998 6138 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 14:59:12.273032 6138 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 14:59:12.273069 6138 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 14:59:12.273120 6138 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 14:59:12.273189 6138 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 14:59:12.273198 6138 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 14:59:12.273208 6138 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 14:59:12.273212 6138 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 14:59:12.273224 6138 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 14:59:12.273238 6138 factory.go:656] Stopping watch factory\\\\nI1205 14:59:12.273250 6138 ovnkube.go:599] Stopped ovnkube\\\\nI1205 14:59:12.273273 6138 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 14:59:12.273283 6138 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 14:59:12.273289 6138 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 14:59:12.273295 6138 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 
14:59:1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"message\\\":\\\".org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 14:59:13.515549 6284 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 14:59:13.515537 6284 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 14:59:13.515752 6284 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.443687 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/h
ost/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.460038 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.476054 4840 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.488673 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.492186 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.503786 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.503846 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.503937 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.503974 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.503996 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:15Z","lastTransitionTime":"2025-12-05T14:59:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.505032 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: W1205 14:59:15.505115 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda03adad7_7e03_4bc8_9a48_98dff0e91cc9.slice/crio-e9b546f131ee14e820f63bb03775ee30ed71ee413e2306e6dbe193464843be9b WatchSource:0}: Error finding container e9b546f131ee14e820f63bb03775ee30ed71ee413e2306e6dbe193464843be9b: Status 404 returned error can't find the container with id e9b546f131ee14e820f63bb03775ee30ed71ee413e2306e6dbe193464843be9b Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.524273 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.536628 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.550082 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tszn5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.563931 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.578943 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.592781 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.606365 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.606407 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.606421 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.606440 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.606452 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:15Z","lastTransitionTime":"2025-12-05T14:59:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.608997 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.624779 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-gn7qq"] Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.625210 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:15 crc kubenswrapper[4840]: E1205 14:59:15.625265 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.626135 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-re
sources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.638350 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.653182 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.671462 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.679692 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs\") pod \"network-metrics-daemon-gn7qq\" (UID: \"f5cf5212-af00-4788-ad5f-ff824fea7c0f\") " pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.679751 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctzwh\" (UniqueName: \"kubernetes.io/projected/f5cf5212-af00-4788-ad5f-ff824fea7c0f-kube-api-access-ctzwh\") pod \"network-metrics-daemon-gn7qq\" (UID: \"f5cf5212-af00-4788-ad5f-ff824fea7c0f\") " pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.689305 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.701674 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.709152 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.709221 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.709235 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.709255 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.709266 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:15Z","lastTransitionTime":"2025-12-05T14:59:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.710292 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://171f7bda3b2b85e2918b75df795cc29c8433c9ec9424935203d35fce02b7b026\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:12Z\\\",\\\"message\\\":\\\"1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 14:59:12.272998 6138 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 14:59:12.273032 6138 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 14:59:12.273069 6138 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 14:59:12.273120 6138 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 14:59:12.273189 6138 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 14:59:12.273198 6138 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 14:59:12.273208 6138 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 14:59:12.273212 6138 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 14:59:12.273224 6138 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 14:59:12.273238 6138 factory.go:656] Stopping watch factory\\\\nI1205 14:59:12.273250 6138 ovnkube.go:599] Stopped ovnkube\\\\nI1205 14:59:12.273273 6138 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 14:59:12.273283 6138 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 14:59:12.273289 6138 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 14:59:12.273295 6138 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 
14:59:1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"message\\\":\\\".org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 14:59:13.515549 6284 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 14:59:13.515537 6284 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 14:59:13.515752 6284 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.742081 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/h
ost/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.756364 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.773690 4840 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.781970 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctzwh\" (UniqueName: \"kubernetes.io/projected/f5cf5212-af00-4788-ad5f-ff824fea7c0f-kube-api-access-ctzwh\") pod \"network-metrics-daemon-gn7qq\" (UID: \"f5cf5212-af00-4788-ad5f-ff824fea7c0f\") " 
pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.782105 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs\") pod \"network-metrics-daemon-gn7qq\" (UID: \"f5cf5212-af00-4788-ad5f-ff824fea7c0f\") " pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:15 crc kubenswrapper[4840]: E1205 14:59:15.782240 4840 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 14:59:15 crc kubenswrapper[4840]: E1205 14:59:15.782302 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs podName:f5cf5212-af00-4788-ad5f-ff824fea7c0f nodeName:}" failed. No retries permitted until 2025-12-05 14:59:16.282283489 +0000 UTC m=+34.623346093 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs") pod "network-metrics-daemon-gn7qq" (UID: "f5cf5212-af00-4788-ad5f-ff824fea7c0f") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.786430 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.801622 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctzwh\" (UniqueName: \"kubernetes.io/projected/f5cf5212-af00-4788-ad5f-ff824fea7c0f-kube-api-access-ctzwh\") pod \"network-metrics-daemon-gn7qq\" (UID: \"f5cf5212-af00-4788-ad5f-ff824fea7c0f\") " pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.804681 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.811668 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.811705 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.811714 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.811727 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.811737 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:15Z","lastTransitionTime":"2025-12-05T14:59:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.820028 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.829652 4840 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.841372 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tszn5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.857182 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.869839 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.880790 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gn7qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5cf5212-af00-4788-ad5f-ff824fea7c0f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gn7qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:15Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.913847 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.913915 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.913927 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.913945 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:15 crc kubenswrapper[4840]: I1205 14:59:15.913958 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:15Z","lastTransitionTime":"2025-12-05T14:59:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.016366 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.016406 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.016418 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.016435 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.016447 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:16Z","lastTransitionTime":"2025-12-05T14:59:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.066733 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.066836 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.066733 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:16 crc kubenswrapper[4840]: E1205 14:59:16.066957 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 14:59:16 crc kubenswrapper[4840]: E1205 14:59:16.067089 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 14:59:16 crc kubenswrapper[4840]: E1205 14:59:16.067164 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.118780 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.118827 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.118837 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.118852 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.118880 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:16Z","lastTransitionTime":"2025-12-05T14:59:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.221147 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.221178 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.221186 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.221200 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.221217 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:16Z","lastTransitionTime":"2025-12-05T14:59:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.289281 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs\") pod \"network-metrics-daemon-gn7qq\" (UID: \"f5cf5212-af00-4788-ad5f-ff824fea7c0f\") " pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:16 crc kubenswrapper[4840]: E1205 14:59:16.289438 4840 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 14:59:16 crc kubenswrapper[4840]: E1205 14:59:16.289528 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs podName:f5cf5212-af00-4788-ad5f-ff824fea7c0f nodeName:}" failed. No retries permitted until 2025-12-05 14:59:17.289508845 +0000 UTC m=+35.630571459 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs") pod "network-metrics-daemon-gn7qq" (UID: "f5cf5212-af00-4788-ad5f-ff824fea7c0f") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.323984 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.324024 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.324036 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.324052 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.324065 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:16Z","lastTransitionTime":"2025-12-05T14:59:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.392834 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-czvxk_e9a432c2-725d-46c6-963e-68a99ba35c89/ovnkube-controller/1.log"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.396572 4840 scope.go:117] "RemoveContainer" containerID="9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc"
Dec 05 14:59:16 crc kubenswrapper[4840]: E1205 14:59:16.396730 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-czvxk_openshift-ovn-kubernetes(e9a432c2-725d-46c6-963e-68a99ba35c89)\"" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.399150 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" event={"ID":"a03adad7-7e03-4bc8-9a48-98dff0e91cc9","Type":"ContainerStarted","Data":"ae3f4003c17203d33aa7ebf592c1131f76243cad6db0fe14120c2b692bbc6077"}
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.399187 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" event={"ID":"a03adad7-7e03-4bc8-9a48-98dff0e91cc9","Type":"ContainerStarted","Data":"d45af504d931633ab2ddeb4f17089aac673eb4138e1fc5aea7024564ff5836ae"}
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.399199 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" event={"ID":"a03adad7-7e03-4bc8-9a48-98dff0e91cc9","Type":"ContainerStarted","Data":"e9b546f131ee14e820f63bb03775ee30ed71ee413e2306e6dbe193464843be9b"}
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.426495 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.426536 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.426546 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.426565 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.426577 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:16Z","lastTransitionTime":"2025-12-05T14:59:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.431409 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:16Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.441208 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tszn5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:16Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.452551 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:16Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.468452 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:16Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.481185 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:16Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.501521 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:16Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.518005 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:16Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.529077 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.529117 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.529128 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.529146 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.529156 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:16Z","lastTransitionTime":"2025-12-05T14:59:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.533043 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gn7qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5cf5212-af00-4788-ad5f-ff824fea7c0f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gn7qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:16Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.554732 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:16Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.569349 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:16Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.584841 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:16Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.606031 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:16Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.619416 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:16Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.631395 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.631443 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.631455 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.631471 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.631484 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:16Z","lastTransitionTime":"2025-12-05T14:59:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.633401 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:16Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.651702 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ce51feee450c9defe5353828640e5ce80afcf08
16c33f74dd117830feacffdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"message\\\":\\\".org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 14:59:13.515549 6284 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 14:59:13.515537 6284 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 14:59:13.515752 6284 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-czvxk_openshift-ovn-kubernetes(e9a432c2-725d-46c6-963e-68a99ba35c89)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:16Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.667595 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:16Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.733932 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.733978 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.733986 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.734002 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.734011 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:16Z","lastTransitionTime":"2025-12-05T14:59:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.836272 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.836349 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.836370 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.836400 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.836422 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:16Z","lastTransitionTime":"2025-12-05T14:59:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.939543 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.939603 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.939615 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.939633 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:16 crc kubenswrapper[4840]: I1205 14:59:16.939646 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:16Z","lastTransitionTime":"2025-12-05T14:59:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.041987 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.042024 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.042038 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.042055 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.042066 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:17Z","lastTransitionTime":"2025-12-05T14:59:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.066383 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:17 crc kubenswrapper[4840]: E1205 14:59:17.066514 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.144683 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.144742 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.144754 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.144773 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.144787 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:17Z","lastTransitionTime":"2025-12-05T14:59:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.246621 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.246673 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.246689 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.246709 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.246728 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:17Z","lastTransitionTime":"2025-12-05T14:59:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.299106 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs\") pod \"network-metrics-daemon-gn7qq\" (UID: \"f5cf5212-af00-4788-ad5f-ff824fea7c0f\") " pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:17 crc kubenswrapper[4840]: E1205 14:59:17.299270 4840 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 14:59:17 crc kubenswrapper[4840]: E1205 14:59:17.299337 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs podName:f5cf5212-af00-4788-ad5f-ff824fea7c0f nodeName:}" failed. No retries permitted until 2025-12-05 14:59:19.299323158 +0000 UTC m=+37.640385772 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs") pod "network-metrics-daemon-gn7qq" (UID: "f5cf5212-af00-4788-ad5f-ff824fea7c0f") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.350212 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.350257 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.350269 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.350285 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.350295 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:17Z","lastTransitionTime":"2025-12-05T14:59:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.418653 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d45af504d931633ab2ddeb4f17089aac673eb4138e1fc5aea7024564ff5836ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3f4003c17203d33aa7ebf592c1131f76243cad6db0fe14120c2b692bbc6077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tszn5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:17Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.434011 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:17Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.452941 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.452995 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.453006 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.453025 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.453050 4840 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:17Z","lastTransitionTime":"2025-12-05T14:59:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.454794 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:17Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.469254 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:17Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.482250 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:17Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.495618 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:17Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.506248 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-05T14:59:17Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.517965 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gn7qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5cf5212-af00-4788-ad5f-ff824fea7c0f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gn7qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:17Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.533275 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:17Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.550460 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:17Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.555470 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.555515 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.555525 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.555543 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.555556 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:17Z","lastTransitionTime":"2025-12-05T14:59:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.565104 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:17Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.574850 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:17Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.585495 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:17Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.599058 4840 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:17Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.619161 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"message\\\":\\\".org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 14:59:13.515549 6284 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 14:59:13.515537 6284 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 14:59:13.515752 6284 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-czvxk_openshift-ovn-kubernetes(e9a432c2-725d-46c6-963e-68a99ba35c89)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:17Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.630738 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:17Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.658075 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.658116 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.658126 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.658142 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.658155 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:17Z","lastTransitionTime":"2025-12-05T14:59:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.760091 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.760129 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.760138 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.760151 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.760159 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:17Z","lastTransitionTime":"2025-12-05T14:59:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.862483 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.862649 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.862689 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.862722 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.862746 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:17Z","lastTransitionTime":"2025-12-05T14:59:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.966183 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.966243 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.966263 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.966288 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:17 crc kubenswrapper[4840]: I1205 14:59:17.966310 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:17Z","lastTransitionTime":"2025-12-05T14:59:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.006936 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.007047 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.007088 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:18 crc kubenswrapper[4840]: E1205 14:59:18.007165 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 14:59:18 crc kubenswrapper[4840]: E1205 14:59:18.007210 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 14:59:18 crc kubenswrapper[4840]: E1205 14:59:18.007223 4840 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 14:59:18 crc kubenswrapper[4840]: E1205 14:59:18.007249 4840 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 14:59:18 crc kubenswrapper[4840]: E1205 14:59:18.007271 4840 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 14:59:18 crc kubenswrapper[4840]: E1205 14:59:18.007285 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 14:59:34.007268119 +0000 UTC m=+52.348330733 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 14:59:18 crc kubenswrapper[4840]: E1205 14:59:18.007393 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 14:59:34.007355672 +0000 UTC m=+52.348418326 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 14:59:18 crc kubenswrapper[4840]: E1205 14:59:18.007559 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 14:59:34.007530656 +0000 UTC m=+52.348593320 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.066274 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.066356 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.066304 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:18 crc kubenswrapper[4840]: E1205 14:59:18.066450 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 14:59:18 crc kubenswrapper[4840]: E1205 14:59:18.066548 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 14:59:18 crc kubenswrapper[4840]: E1205 14:59:18.066774 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.068664 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.068746 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.068768 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.068789 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.068808 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:18Z","lastTransitionTime":"2025-12-05T14:59:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.108639 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.108828 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:18 crc kubenswrapper[4840]: E1205 14:59:18.108849 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 14:59:34.108813229 +0000 UTC m=+52.449875843 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 14:59:18 crc kubenswrapper[4840]: E1205 14:59:18.109117 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 05 14:59:18 crc kubenswrapper[4840]: E1205 14:59:18.109171 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 05 14:59:18 crc kubenswrapper[4840]: E1205 14:59:18.109203 4840 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 14:59:18 crc kubenswrapper[4840]: E1205 14:59:18.109317 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 14:59:34.109285862 +0000 UTC m=+52.450348516 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.172597 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.172679 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.172705 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.172737 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.172759 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:18Z","lastTransitionTime":"2025-12-05T14:59:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.277597 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.277637 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.277644 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.277659 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.277673 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:18Z","lastTransitionTime":"2025-12-05T14:59:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.380442 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.380485 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.380493 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.380508 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.380517 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:18Z","lastTransitionTime":"2025-12-05T14:59:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.482777 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.482801 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.482811 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.482823 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.482831 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:18Z","lastTransitionTime":"2025-12-05T14:59:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.585623 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.585677 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.585696 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.585732 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.585747 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:18Z","lastTransitionTime":"2025-12-05T14:59:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.689204 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.689268 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.689280 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.689303 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.689316 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:18Z","lastTransitionTime":"2025-12-05T14:59:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.792180 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.792231 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.792246 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.792264 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.792282 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:18Z","lastTransitionTime":"2025-12-05T14:59:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.894898 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.894948 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.894960 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.894976 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.894988 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:18Z","lastTransitionTime":"2025-12-05T14:59:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.998569 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.998628 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.998639 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.998658 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:18 crc kubenswrapper[4840]: I1205 14:59:18.998672 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:18Z","lastTransitionTime":"2025-12-05T14:59:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.065818 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:19 crc kubenswrapper[4840]: E1205 14:59:19.066096 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.101884 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.101947 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.101960 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.101978 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.101991 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:19Z","lastTransitionTime":"2025-12-05T14:59:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.204289 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.204331 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.204343 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.204357 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.204366 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:19Z","lastTransitionTime":"2025-12-05T14:59:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.306839 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.306904 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.306918 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.306933 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.306944 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:19Z","lastTransitionTime":"2025-12-05T14:59:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.323425 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs\") pod \"network-metrics-daemon-gn7qq\" (UID: \"f5cf5212-af00-4788-ad5f-ff824fea7c0f\") " pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:19 crc kubenswrapper[4840]: E1205 14:59:19.323616 4840 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 14:59:19 crc kubenswrapper[4840]: E1205 14:59:19.323679 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs podName:f5cf5212-af00-4788-ad5f-ff824fea7c0f nodeName:}" failed. No retries permitted until 2025-12-05 14:59:23.323660857 +0000 UTC m=+41.664723481 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs") pod "network-metrics-daemon-gn7qq" (UID: "f5cf5212-af00-4788-ad5f-ff824fea7c0f") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.408832 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.408886 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.408894 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.408907 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.408916 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:19Z","lastTransitionTime":"2025-12-05T14:59:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.512259 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.512298 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.512306 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.512319 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.512329 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:19Z","lastTransitionTime":"2025-12-05T14:59:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.615287 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.615353 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.615373 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.615394 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.615408 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:19Z","lastTransitionTime":"2025-12-05T14:59:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.718055 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.718756 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.718798 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.718819 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.718832 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:19Z","lastTransitionTime":"2025-12-05T14:59:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.823716 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.823825 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.823838 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.823854 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.823919 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:19Z","lastTransitionTime":"2025-12-05T14:59:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.927047 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.927085 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.927095 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.927109 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:19 crc kubenswrapper[4840]: I1205 14:59:19.927124 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:19Z","lastTransitionTime":"2025-12-05T14:59:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.030254 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.030337 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.030376 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.030412 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.030437 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:20Z","lastTransitionTime":"2025-12-05T14:59:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.066826 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.066932 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.066856 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:20 crc kubenswrapper[4840]: E1205 14:59:20.067109 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 14:59:20 crc kubenswrapper[4840]: E1205 14:59:20.067193 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 14:59:20 crc kubenswrapper[4840]: E1205 14:59:20.067272 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.134022 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.134118 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.134143 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.134166 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.134184 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:20Z","lastTransitionTime":"2025-12-05T14:59:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.237639 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.237685 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.237694 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.237709 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.237719 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:20Z","lastTransitionTime":"2025-12-05T14:59:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.340433 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.340489 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.340505 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.340525 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.340541 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:20Z","lastTransitionTime":"2025-12-05T14:59:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.442898 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.442962 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.442976 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.442997 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.443017 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:20Z","lastTransitionTime":"2025-12-05T14:59:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.545908 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.546382 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.546459 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.546491 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.546516 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:20Z","lastTransitionTime":"2025-12-05T14:59:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.649359 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.649478 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.649490 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.649511 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.649523 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:20Z","lastTransitionTime":"2025-12-05T14:59:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.752661 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.752704 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.752713 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.752728 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.752740 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:20Z","lastTransitionTime":"2025-12-05T14:59:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.855105 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.855213 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.855239 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.855261 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.855275 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:20Z","lastTransitionTime":"2025-12-05T14:59:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.957983 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.958054 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.958067 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.958093 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:20 crc kubenswrapper[4840]: I1205 14:59:20.958107 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:20Z","lastTransitionTime":"2025-12-05T14:59:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.061530 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.061592 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.061604 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.061650 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.061663 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:21Z","lastTransitionTime":"2025-12-05T14:59:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.065849 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:21 crc kubenswrapper[4840]: E1205 14:59:21.065983 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.164265 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.164307 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.164318 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.164334 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.164347 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:21Z","lastTransitionTime":"2025-12-05T14:59:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.266719 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.266772 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.266789 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.266806 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.266816 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:21Z","lastTransitionTime":"2025-12-05T14:59:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.369959 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.369992 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.370002 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.370020 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.370031 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:21Z","lastTransitionTime":"2025-12-05T14:59:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.471745 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.471792 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.471811 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.471829 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.471840 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:21Z","lastTransitionTime":"2025-12-05T14:59:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.574782 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.574849 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.574895 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.574919 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.574936 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:21Z","lastTransitionTime":"2025-12-05T14:59:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.677506 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.677543 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.677553 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.677568 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.677576 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:21Z","lastTransitionTime":"2025-12-05T14:59:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.766937 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.780062 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.780094 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.780104 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.780115 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.780125 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:21Z","lastTransitionTime":"2025-12-05T14:59:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.782737 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:21Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.799749 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:21Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.813700 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:21Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.824463 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T14:59:21Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.838090 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:21Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.849076 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:21Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.866432 
4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154
edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"message\\\":\\\".org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 14:59:13.515549 6284 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 14:59:13.515537 6284 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 14:59:13.515752 6284 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-czvxk_openshift-ovn-kubernetes(e9a432c2-725d-46c6-963e-68a99ba35c89)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:21Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.878732 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:21Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.882525 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.882564 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.882580 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.882598 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.882611 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:21Z","lastTransitionTime":"2025-12-05T14:59:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.890329 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d45af504d931633ab2ddeb4f17089aac673eb4138e1fc5aea7024564ff5836ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3f4003c17203d33aa7ebf592c1131f76243cad6db0fe14120c2b692bbc6077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tszn5\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:21Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.904820 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:21Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.918905 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:21Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.930788 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:21Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.943955 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:21Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.962684 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:21Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.976118 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-05T14:59:21Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.988788 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.988910 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.988952 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.988976 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:21 crc kubenswrapper[4840]: I1205 14:59:21.988989 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:21Z","lastTransitionTime":"2025-12-05T14:59:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.002103 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gn7qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5cf5212-af00-4788-ad5f-ff824fea7c0f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gn7qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:21Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.066713 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.066764 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:22 crc kubenswrapper[4840]: E1205 14:59:22.066853 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.066728 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:22 crc kubenswrapper[4840]: E1205 14:59:22.067036 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 14:59:22 crc kubenswrapper[4840]: E1205 14:59:22.067167 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.083807 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c
7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.091860 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.091960 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.091980 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.092018 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.092036 4840 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:22Z","lastTransitionTime":"2025-12-05T14:59:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.099333 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.115671 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.128374 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.141298 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.158912 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.184157 
4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154
edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"message\\\":\\\".org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 14:59:13.515549 6284 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 14:59:13.515537 6284 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 14:59:13.515752 6284 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-czvxk_openshift-ovn-kubernetes(e9a432c2-725d-46c6-963e-68a99ba35c89)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.194853 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.194907 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.194916 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.194932 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.194942 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:22Z","lastTransitionTime":"2025-12-05T14:59:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.198957 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.209941 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.222695 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d45af504d931633ab2ddeb4f17089aac673eb4138e1fc5aea7024564ff5836ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3f4003c17203d33aa7ebf592c1131f76243cad6db0fe14120c2b692bbc6077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tszn5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 05 
14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.235538 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.249779 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.265530 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.279754 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.296882 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.296924 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.296937 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.296953 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.296964 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:22Z","lastTransitionTime":"2025-12-05T14:59:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.305693 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.319365 4840 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-gn7qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5cf5212-af00-4788-ad5f-ff824fea7c0f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gn7qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:22Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.399678 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.399736 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.399748 4840 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.399770 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.399784 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:22Z","lastTransitionTime":"2025-12-05T14:59:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.503276 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.503329 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.503341 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.503360 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.503373 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:22Z","lastTransitionTime":"2025-12-05T14:59:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.608935 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.609000 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.609014 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.609037 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.609051 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:22Z","lastTransitionTime":"2025-12-05T14:59:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.712256 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.712304 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.712336 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.712358 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.712371 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:22Z","lastTransitionTime":"2025-12-05T14:59:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.814490 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.814621 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.814638 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.814665 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.814681 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:22Z","lastTransitionTime":"2025-12-05T14:59:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.918599 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.918692 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.918707 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.918727 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:22 crc kubenswrapper[4840]: I1205 14:59:22.918742 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:22Z","lastTransitionTime":"2025-12-05T14:59:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.034236 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.034280 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.034294 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.034315 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.034331 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:23Z","lastTransitionTime":"2025-12-05T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.066643 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:23 crc kubenswrapper[4840]: E1205 14:59:23.066858 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.136534 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.136582 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.136592 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.136607 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.136621 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:23Z","lastTransitionTime":"2025-12-05T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.239335 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.239408 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.239425 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.239447 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.239462 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:23Z","lastTransitionTime":"2025-12-05T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.341801 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.341844 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.341854 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.342155 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.342188 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:23Z","lastTransitionTime":"2025-12-05T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.369666 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs\") pod \"network-metrics-daemon-gn7qq\" (UID: \"f5cf5212-af00-4788-ad5f-ff824fea7c0f\") " pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:23 crc kubenswrapper[4840]: E1205 14:59:23.369820 4840 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 14:59:23 crc kubenswrapper[4840]: E1205 14:59:23.369903 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs podName:f5cf5212-af00-4788-ad5f-ff824fea7c0f nodeName:}" failed. No retries permitted until 2025-12-05 14:59:31.369883725 +0000 UTC m=+49.710946339 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs") pod "network-metrics-daemon-gn7qq" (UID: "f5cf5212-af00-4788-ad5f-ff824fea7c0f") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.444468 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.444527 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.444552 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.444574 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.444592 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:23Z","lastTransitionTime":"2025-12-05T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.547649 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.547700 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.547714 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.547733 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.547744 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:23Z","lastTransitionTime":"2025-12-05T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.650695 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.650774 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.650791 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.650816 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.650830 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:23Z","lastTransitionTime":"2025-12-05T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.753561 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.753600 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.753609 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.753623 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.753633 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:23Z","lastTransitionTime":"2025-12-05T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.857014 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.857062 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.857073 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.857093 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.857104 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:23Z","lastTransitionTime":"2025-12-05T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.960331 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.960371 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.960383 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.960401 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:23 crc kubenswrapper[4840]: I1205 14:59:23.960412 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:23Z","lastTransitionTime":"2025-12-05T14:59:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:24 crc kubenswrapper[4840]: I1205 14:59:24.063821 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:24 crc kubenswrapper[4840]: I1205 14:59:24.063899 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:24 crc kubenswrapper[4840]: I1205 14:59:24.063916 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:24 crc kubenswrapper[4840]: I1205 14:59:24.063938 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:24 crc kubenswrapper[4840]: I1205 14:59:24.063957 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:24Z","lastTransitionTime":"2025-12-05T14:59:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:24 crc kubenswrapper[4840]: I1205 14:59:24.066108 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:24 crc kubenswrapper[4840]: I1205 14:59:24.066182 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:24 crc kubenswrapper[4840]: E1205 14:59:24.066243 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 14:59:24 crc kubenswrapper[4840]: I1205 14:59:24.066255 4840 util.go:30] "No sandbox for pod can be found. 
Dec 05 14:59:24 crc kubenswrapper[4840]: E1205 14:59:24.066699 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 14:59:24 crc kubenswrapper[4840]: E1205 14:59:24.066933 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.066468 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq"
Dec 05 14:59:25 crc kubenswrapper[4840]: E1205 14:59:25.066667 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f"
Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.332687 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.332709 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.332717 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.332728 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.332736 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:25Z","lastTransitionTime":"2025-12-05T14:59:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:25 crc kubenswrapper[4840]: E1205 14:59:25.344370 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:25Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.347166 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.347199 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.347210 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.347223 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.347233 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:25Z","lastTransitionTime":"2025-12-05T14:59:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:25 crc kubenswrapper[4840]: E1205 14:59:25.357705 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:25Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.361320 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.361365 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
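Every one of these status patches is rejected for the same underlying reason: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 serves a certificate that expired on 2025-08-24T17:21:41Z, long before the current time of 2025-12-05T14:59:25Z. Below is a short Python sketch for confirming the validity window from the node itself, with the endpoint taken from the error above; the third-party cryptography package (version 42 or newer for the _utc accessors) is an assumed dependency.

import ssl
from cryptography import x509

# Webhook endpoint from the "failed calling webhook" error above.
HOST, PORT = "127.0.0.1", 9743

# get_server_certificate() does not verify the peer certificate, so it
# still succeeds against an expired cert and returns the leaf as PEM text.
pem = ssl.get_server_certificate((HOST, PORT))
cert = x509.load_pem_x509_certificate(pem.encode())

print("subject:   ", cert.subject.rfc4514_string())
print("not before:", cert.not_valid_before_utc)
print("not after: ", cert.not_valid_after_utc)  # expect 2025-08-24T17:21:41Z here

Until that certificate is rotated, the node status patch keeps failing and the kubelet keeps retrying, which is the loop visible in this log.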
event="NodeHasNoDiskPressure" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.361375 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.361389 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.361400 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:25Z","lastTransitionTime":"2025-12-05T14:59:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:25 crc kubenswrapper[4840]: E1205 14:59:25.372118 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:25Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.375413 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.375470 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.375480 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.375499 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.375514 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:25Z","lastTransitionTime":"2025-12-05T14:59:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:25 crc kubenswrapper[4840]: E1205 14:59:25.386834 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:25Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.390897 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.390972 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.390985 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.391004 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.391013 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:25Z","lastTransitionTime":"2025-12-05T14:59:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:25 crc kubenswrapper[4840]: E1205 14:59:25.404187 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:25Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:25 crc kubenswrapper[4840]: E1205 14:59:25.404320 4840 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.431735 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.431765 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.431774 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.431787 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.431796 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:25Z","lastTransitionTime":"2025-12-05T14:59:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.534314 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.534357 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.534369 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.534387 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.534400 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:25Z","lastTransitionTime":"2025-12-05T14:59:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.637695 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.637751 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.637761 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.637781 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.637792 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:25Z","lastTransitionTime":"2025-12-05T14:59:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.741009 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.741058 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.741072 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.741093 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.741105 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:25Z","lastTransitionTime":"2025-12-05T14:59:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.844577 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.844631 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.844649 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.844676 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.844697 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:25Z","lastTransitionTime":"2025-12-05T14:59:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.947136 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.947195 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.947206 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.947224 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:25 crc kubenswrapper[4840]: I1205 14:59:25.947238 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:25Z","lastTransitionTime":"2025-12-05T14:59:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.050998 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.051077 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.051101 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.051132 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.051155 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:26Z","lastTransitionTime":"2025-12-05T14:59:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.066705 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.066768 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.066788 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:26 crc kubenswrapper[4840]: E1205 14:59:26.066939 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 14:59:26 crc kubenswrapper[4840]: E1205 14:59:26.067031 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 14:59:26 crc kubenswrapper[4840]: E1205 14:59:26.067136 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.154362 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.154392 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.154400 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.154411 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.154421 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:26Z","lastTransitionTime":"2025-12-05T14:59:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.257570 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.257637 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.257656 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.257678 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.257696 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:26Z","lastTransitionTime":"2025-12-05T14:59:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.361154 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.361221 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.361243 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.361268 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.361289 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:26Z","lastTransitionTime":"2025-12-05T14:59:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.463709 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.463770 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.463790 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.463814 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.463833 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:26Z","lastTransitionTime":"2025-12-05T14:59:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.567297 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.567352 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.567366 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.567384 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.567398 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:26Z","lastTransitionTime":"2025-12-05T14:59:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.670247 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.670311 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.670332 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.670356 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.670376 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:26Z","lastTransitionTime":"2025-12-05T14:59:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.773042 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.773094 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.773109 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.773130 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.773145 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:26Z","lastTransitionTime":"2025-12-05T14:59:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.875970 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.876027 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.876048 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.876071 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.876088 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:26Z","lastTransitionTime":"2025-12-05T14:59:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.978969 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.979022 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.979031 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.979046 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:26 crc kubenswrapper[4840]: I1205 14:59:26.979056 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:26Z","lastTransitionTime":"2025-12-05T14:59:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:27 crc kubenswrapper[4840]: I1205 14:59:27.066454 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:27 crc kubenswrapper[4840]: E1205 14:59:27.066677 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 14:59:27 crc kubenswrapper[4840]: I1205 14:59:27.080984 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:27 crc kubenswrapper[4840]: I1205 14:59:27.081035 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:27 crc kubenswrapper[4840]: I1205 14:59:27.081045 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:27 crc kubenswrapper[4840]: I1205 14:59:27.081065 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:27 crc kubenswrapper[4840]: I1205 14:59:27.081081 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:27Z","lastTransitionTime":"2025-12-05T14:59:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} ... Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.005146 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.005193 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.005203 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.005218 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.005232 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:28Z","lastTransitionTime":"2025-12-05T14:59:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.066486 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.066573 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.066504 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 14:59:28 crc kubenswrapper[4840]: E1205 14:59:28.066703 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 14:59:28 crc kubenswrapper[4840]: E1205 14:59:28.066819 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 14:59:28 crc kubenswrapper[4840]: E1205 14:59:28.066999 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.107928 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.107965 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.107974 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.107990 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.108003 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:28Z","lastTransitionTime":"2025-12-05T14:59:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.210393 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.210480 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.210496 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.210615 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.210648 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:28Z","lastTransitionTime":"2025-12-05T14:59:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.312636 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.312672 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.312680 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.312693 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.312735 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:28Z","lastTransitionTime":"2025-12-05T14:59:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.415768 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.415819 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.415830 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.415845 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.415856 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:28Z","lastTransitionTime":"2025-12-05T14:59:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.518149 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.518186 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.518196 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.518211 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.518222 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:28Z","lastTransitionTime":"2025-12-05T14:59:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.621219 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.621256 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.621265 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.621280 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.621291 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:28Z","lastTransitionTime":"2025-12-05T14:59:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.723919 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.723963 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.723982 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.724002 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.724016 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:28Z","lastTransitionTime":"2025-12-05T14:59:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.826430 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.826478 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.826487 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.826501 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.826510 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:28Z","lastTransitionTime":"2025-12-05T14:59:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.929275 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.929324 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.929341 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.929364 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:28 crc kubenswrapper[4840]: I1205 14:59:28.929381 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:28Z","lastTransitionTime":"2025-12-05T14:59:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.032198 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.032279 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.032302 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.032334 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.032353 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:29Z","lastTransitionTime":"2025-12-05T14:59:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.066050 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq"
Dec 05 14:59:29 crc kubenswrapper[4840]: E1205 14:59:29.066195 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.135264 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.135315 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.135328 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.135344 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.135355 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:29Z","lastTransitionTime":"2025-12-05T14:59:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.238551 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.238604 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.238645 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.238666 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.238679 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:29Z","lastTransitionTime":"2025-12-05T14:59:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.341407 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.341483 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.341505 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.341530 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.341547 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:29Z","lastTransitionTime":"2025-12-05T14:59:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.443471 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.443507 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.443515 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.443529 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.443539 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:29Z","lastTransitionTime":"2025-12-05T14:59:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.545643 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.545707 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.545725 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.545747 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.545764 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:29Z","lastTransitionTime":"2025-12-05T14:59:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.648585 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.648636 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.648653 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.648677 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.648694 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:29Z","lastTransitionTime":"2025-12-05T14:59:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.751511 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.751561 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.751573 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.751588 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.751603 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:29Z","lastTransitionTime":"2025-12-05T14:59:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.854406 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.854442 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.854450 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.854463 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.854471 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:29Z","lastTransitionTime":"2025-12-05T14:59:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.957918 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.957987 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.958009 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.958039 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:29 crc kubenswrapper[4840]: I1205 14:59:29.958064 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:29Z","lastTransitionTime":"2025-12-05T14:59:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.060294 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.060388 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.060415 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.060440 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.060459 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:30Z","lastTransitionTime":"2025-12-05T14:59:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.067296 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.067457 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.067522 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.067763 4840 scope.go:117] "RemoveContainer" containerID="9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc"
Dec 05 14:59:30 crc kubenswrapper[4840]: E1205 14:59:30.067805 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 14:59:30 crc kubenswrapper[4840]: E1205 14:59:30.068010 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 14:59:30 crc kubenswrapper[4840]: E1205 14:59:30.067685 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.163550 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.164084 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.164101 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.164121 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.164137 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:30Z","lastTransitionTime":"2025-12-05T14:59:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.267550 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.267898 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.267963 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.268027 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.268083 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:30Z","lastTransitionTime":"2025-12-05T14:59:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.371523 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.371946 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.372159 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.372380 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.372570 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:30Z","lastTransitionTime":"2025-12-05T14:59:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.475608 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.475655 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.475666 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.475681 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.475691 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:30Z","lastTransitionTime":"2025-12-05T14:59:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.578673 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.578741 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.578827 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.578847 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.578878 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:30Z","lastTransitionTime":"2025-12-05T14:59:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.682273 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.682302 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.682310 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.682322 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.682330 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:30Z","lastTransitionTime":"2025-12-05T14:59:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.785265 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.785304 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.785313 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.785330 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.785338 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:30Z","lastTransitionTime":"2025-12-05T14:59:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.887667 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.887711 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.887724 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.887740 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.887751 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:30Z","lastTransitionTime":"2025-12-05T14:59:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.893848 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.901122 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"]
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.918628 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:30Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.950159 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:30Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.961335 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:30Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.973175 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:30Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.986411 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:30Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.989978 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.990008 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.990019 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.990034 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.990046 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:30Z","lastTransitionTime":"2025-12-05T14:59:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:30 crc kubenswrapper[4840]: I1205 14:59:30.996605 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:30Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.007173 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d45af504d931633ab2ddeb4f17089aac673eb4138e1fc5aea7024564ff5836ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3f4003c17203d33aa7ebf592c1131f76243cad6db0fe14120c2b692bbc6077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tszn5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 
14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.030018 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gn7qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5cf5212-af00-4788-ad5f-ff824fea7c0f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gn7qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.045682 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.066331 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:31 crc kubenswrapper[4840]: E1205 14:59:31.066440 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.075676 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-re
sources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.087281 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.091883 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.091926 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.091939 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.091954 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.091964 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:31Z","lastTransitionTime":"2025-12-05T14:59:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.106132 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.115255 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.124340 4840 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.143827 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"message\\\":\\\".org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 14:59:13.515549 6284 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 14:59:13.515537 6284 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 14:59:13.515752 6284 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-czvxk_openshift-ovn-kubernetes(e9a432c2-725d-46c6-963e-68a99ba35c89)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.156706 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.194467 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.194515 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.194530 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.194547 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.194559 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:31Z","lastTransitionTime":"2025-12-05T14:59:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.316814 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.316857 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.316884 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.316903 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.316914 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:31Z","lastTransitionTime":"2025-12-05T14:59:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.418700 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.418767 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.418784 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.418806 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.418820 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:31Z","lastTransitionTime":"2025-12-05T14:59:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.442966 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs\") pod \"network-metrics-daemon-gn7qq\" (UID: \"f5cf5212-af00-4788-ad5f-ff824fea7c0f\") " pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:31 crc kubenswrapper[4840]: E1205 14:59:31.443131 4840 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 14:59:31 crc kubenswrapper[4840]: E1205 14:59:31.443225 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs podName:f5cf5212-af00-4788-ad5f-ff824fea7c0f nodeName:}" failed. No retries permitted until 2025-12-05 14:59:47.44320763 +0000 UTC m=+65.784270234 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs") pod "network-metrics-daemon-gn7qq" (UID: "f5cf5212-af00-4788-ad5f-ff824fea7c0f") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.455096 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-czvxk_e9a432c2-725d-46c6-963e-68a99ba35c89/ovnkube-controller/1.log" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.461603 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerStarted","Data":"02ce9423f51dd0bad2b3d0afef0cea370294f79d55fc2ede0bfb11fce61493da"} Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.462317 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.478906 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.491833 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.506295 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.517240 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.520751 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.520808 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.520819 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.520833 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.520842 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:31Z","lastTransitionTime":"2025-12-05T14:59:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.531445 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3032bf4d-2317-4cc8-9117-9cb879a42db0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c057d4b05d7f3d5366be3d427eb21f50da128fafa496ba450352080fe1e93108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7586294d68d4dc8b5e9fd6ed9c807d9086d3c5b5690583b353e966af65b7ec2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\"
:\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb83899c7f1ab6ad9c37b9a9760c060049c4868ed04fcd1939553e569d05db12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.547546 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.564539 4840 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.584964 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02ce9423f51dd0bad2b3d0afef0cea370294f79d55fc2ede0bfb11fce61493da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"message\\\":\\\".org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 14:59:13.515549 6284 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 14:59:13.515537 6284 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 14:59:13.515752 6284 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.598212 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.609253 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d45af504d931633ab2ddeb4f17089aac673eb4138e1fc5aea7024564ff5836ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3f4003c17203d33aa7ebf592c1131f76243cad6db0fe14120c2b692bbc6077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tszn5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.623231 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.623306 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.623324 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.623344 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.623359 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:31Z","lastTransitionTime":"2025-12-05T14:59:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.624986 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.640489 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.651470 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.662326 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.673453 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.681990 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.691425 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gn7qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5cf5212-af00-4788-ad5f-ff824fea7c0f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gn7qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:31Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.726156 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:31 crc kubenswrapper[4840]: 
I1205 14:59:31.726191 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.726202 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.726218 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.726230 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:31Z","lastTransitionTime":"2025-12-05T14:59:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.828516 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.828578 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.828599 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.828623 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.828640 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:31Z","lastTransitionTime":"2025-12-05T14:59:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.931397 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.931493 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.931518 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.931548 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:31 crc kubenswrapper[4840]: I1205 14:59:31.931567 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:31Z","lastTransitionTime":"2025-12-05T14:59:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.033751 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.033793 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.033807 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.033829 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.033852 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:32Z","lastTransitionTime":"2025-12-05T14:59:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.066512 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.066570 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.066528 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:32 crc kubenswrapper[4840]: E1205 14:59:32.066710 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 14:59:32 crc kubenswrapper[4840]: E1205 14:59:32.066903 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 14:59:32 crc kubenswrapper[4840]: E1205 14:59:32.066988 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.084102 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerI
D\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":
[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 
14:59:32.092612 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.101554 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d45af504d931633ab2ddeb4f17089aac673eb4138e1fc5aea7024564ff5836ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3f4003c17203d33aa7ebf592c1131f76243cad6db0fe14120c2b692bbc6077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tszn5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 
14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.111116 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.123470 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.132977 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.135975 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.136087 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.136160 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.136254 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.136336 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:32Z","lastTransitionTime":"2025-12-05T14:59:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.147179 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.159262 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gn7qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5cf5212-af00-4788-ad5f-ff824fea7c0f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gn7qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.175585 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.188382 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.199179 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.209054 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.221084 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3032bf4d-2317-4cc8-9117-9cb879a42db0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c057d4b05d7f3d5366be3d427eb21f50da128fafa496ba450352080fe1e93108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7586294d68d4dc8b5e9fd6ed9c807d9086d3c5b5690583b353e966af65b7ec2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb83899c7f1ab6ad9c37b9a9760c060049c4868ed04fcd1939553e569d05db12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\
\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.233153 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"nam
e\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.238506 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.238563 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.238582 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.238609 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.238634 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:32Z","lastTransitionTime":"2025-12-05T14:59:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.247018 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.265461 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02ce9423f51dd0bad2b3d0afef0cea370294f79d55fc2ede0bfb11fce61493da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"message\\\":\\\".org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 14:59:13.515549 6284 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 14:59:13.515537 6284 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 14:59:13.515752 6284 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.281323 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.340531 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.340563 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.340572 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.340586 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.340596 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:32Z","lastTransitionTime":"2025-12-05T14:59:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.443106 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.443158 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.443167 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.443183 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.443197 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:32Z","lastTransitionTime":"2025-12-05T14:59:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.466111 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-czvxk_e9a432c2-725d-46c6-963e-68a99ba35c89/ovnkube-controller/2.log" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.466661 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-czvxk_e9a432c2-725d-46c6-963e-68a99ba35c89/ovnkube-controller/1.log" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.468840 4840 generic.go:334] "Generic (PLEG): container finished" podID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerID="02ce9423f51dd0bad2b3d0afef0cea370294f79d55fc2ede0bfb11fce61493da" exitCode=1 Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.468985 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerDied","Data":"02ce9423f51dd0bad2b3d0afef0cea370294f79d55fc2ede0bfb11fce61493da"} Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.469085 4840 scope.go:117] "RemoveContainer" containerID="9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.469573 4840 scope.go:117] "RemoveContainer" containerID="02ce9423f51dd0bad2b3d0afef0cea370294f79d55fc2ede0bfb11fce61493da" Dec 05 14:59:32 crc kubenswrapper[4840]: E1205 14:59:32.469731 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-czvxk_openshift-ovn-kubernetes(e9a432c2-725d-46c6-963e-68a99ba35c89)\"" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.490851 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.502922 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.517103 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\
\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.530824 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.542719 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.545656 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.545684 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.545693 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.545707 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.545717 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:32Z","lastTransitionTime":"2025-12-05T14:59:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.564817 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02ce9423f51dd0bad2b3d0afef0cea370294f79d55fc2ede0bfb11fce61493da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ce51feee450c9defe5353828640e5ce80afcf0816c33f74dd117830feacffdc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"message\\\":\\\".org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 14:59:13.515549 6284 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 14:59:13.515537 6284 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 14:59:13.515752 6284 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02ce9423f51dd0bad2b3d0afef0cea370294f79d55fc2ede0bfb11fce61493da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:31Z\\\",\\\"message\\\":\\\"-kubernetes/ovnkube-node-czvxk in node crc\\\\nI1205 14:59:31.532223 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-czvxk after 0 failed attempt(s)\\\\nI1205 14:59:31.532234 6498 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-czvxk\\\\nI1205 14:59:31.530225 6498 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5\\\\nI1205 14:59:31.532257 6498 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5 in node crc\\\\nI1205 14:59:31.532266 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5 after 0 failed attempt(s)\\\\nI1205 14:59:31.532275 6498 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5\\\\nF1205 14:59:31.530611 6498 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to 
star\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d
2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.577325 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt
/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.590078 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3032bf4d-2317-4cc8-9117-9cb879a42db0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c057d4b05d7f3d5366be3d427eb21f50da128fafa496ba450352080fe1e93108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7586294d68d4dc8b5e9fd6ed9c807d9086d3c5b5690583b353e966af65b7ec2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb83899c7f1ab6ad9c37b9a9760c060049c4868ed04fcd1939553e569d05db12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.606071 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.621210 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.635848 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.648010 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.648044 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.648057 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.648073 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.648082 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:32Z","lastTransitionTime":"2025-12-05T14:59:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.648328 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.661456 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.671309 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.681691 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d45af504d931633ab2ddeb4f17089aac673eb4138e1fc5aea7024564ff5836ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3f4003c17203d33aa7ebf592c1131f76243cad6db0fe14120c2b692bbc6077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tszn5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 
14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.693742 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.704638 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gn7qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5cf5212-af00-4788-ad5f-ff824fea7c0f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gn7qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:32Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.750569 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.750615 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.750625 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.750639 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.750651 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:32Z","lastTransitionTime":"2025-12-05T14:59:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.853628 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.853696 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.853717 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.853754 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.853767 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:32Z","lastTransitionTime":"2025-12-05T14:59:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.956921 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.956955 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.956967 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.956982 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:32 crc kubenswrapper[4840]: I1205 14:59:32.956993 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:32Z","lastTransitionTime":"2025-12-05T14:59:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.059774 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.059943 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.059966 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.059990 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.060045 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:33Z","lastTransitionTime":"2025-12-05T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.066114 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq"
Dec 05 14:59:33 crc kubenswrapper[4840]: E1205 14:59:33.066221 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.162049 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.162083 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.162091 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.162104 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.162114 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:33Z","lastTransitionTime":"2025-12-05T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.264420 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.264458 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.264467 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.264481 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.264492 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:33Z","lastTransitionTime":"2025-12-05T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.366785 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.366817 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.366824 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.366836 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.366845 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:33Z","lastTransitionTime":"2025-12-05T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.469790 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.470113 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.470264 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.470420 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.470543 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:33Z","lastTransitionTime":"2025-12-05T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.473470 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-czvxk_e9a432c2-725d-46c6-963e-68a99ba35c89/ovnkube-controller/2.log" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.477125 4840 scope.go:117] "RemoveContainer" containerID="02ce9423f51dd0bad2b3d0afef0cea370294f79d55fc2ede0bfb11fce61493da" Dec 05 14:59:33 crc kubenswrapper[4840]: E1205 14:59:33.477284 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-czvxk_openshift-ovn-kubernetes(e9a432c2-725d-46c6-963e-68a99ba35c89)\"" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.494943 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:33Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.512967 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:33Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.523080 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-05T14:59:33Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.534636 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d45af504d931633ab2ddeb4f17089aac673eb4138e1fc5aea7024564ff5836ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3f4003c17203d33aa7ebf592c1131f76243cad6db0fe14120c2b692bbc6077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tszn5\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:33Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.548526 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:33Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.560919 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:33Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.571350 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:33Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.572767 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.572797 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.572806 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.572820 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.572829 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:33Z","lastTransitionTime":"2025-12-05T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.579789 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gn7qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5cf5212-af00-4788-ad5f-ff824fea7c0f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gn7qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:33Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.591017 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:33Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.603490 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:33Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.614826 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:33Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.623234 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T14:59:33Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.634841 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:33Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.645199 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3032bf4d-2317-4cc8-9117-9cb879a42db0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c057d4b05d7f3d5366be3d427eb21f50da128fafa496ba450352080fe1e93108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7586294d68d4dc8b5e9fd6ed9c807d9086d3c5b5690583b353e966af65b7ec2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb83899c7f1ab6ad9c37b9a9760c060049c4868ed04fcd1939553e569d05db12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a938
0066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:33Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.654548 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:33Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.666388 4840 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:33Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.674656 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.674684 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:33 
crc kubenswrapper[4840]: I1205 14:59:33.674693 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.674704 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.674714 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:33Z","lastTransitionTime":"2025-12-05T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.685064 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02ce9423f51dd0bad2b3d0afef0cea370294f79d
55fc2ede0bfb11fce61493da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02ce9423f51dd0bad2b3d0afef0cea370294f79d55fc2ede0bfb11fce61493da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:31Z\\\",\\\"message\\\":\\\"-kubernetes/ovnkube-node-czvxk in node crc\\\\nI1205 14:59:31.532223 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-czvxk after 0 failed attempt(s)\\\\nI1205 14:59:31.532234 6498 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-czvxk\\\\nI1205 14:59:31.530225 6498 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5\\\\nI1205 14:59:31.532257 6498 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5 in node crc\\\\nI1205 14:59:31.532266 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5 after 0 failed attempt(s)\\\\nI1205 14:59:31.532275 6498 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5\\\\nF1205 14:59:31.530611 6498 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to star\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-czvxk_openshift-ovn-kubernetes(e9a432c2-725d-46c6-963e-68a99ba35c89)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:33Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.777628 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.777672 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.777685 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.777706 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.777725 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:33Z","lastTransitionTime":"2025-12-05T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.880081 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.880155 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.880174 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.880199 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.880217 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:33Z","lastTransitionTime":"2025-12-05T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.982245 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.982287 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.982303 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.982321 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:33 crc kubenswrapper[4840]: I1205 14:59:33.982334 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:33Z","lastTransitionTime":"2025-12-05T14:59:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.066377 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.066511 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 14:59:34 crc kubenswrapper[4840]: E1205 14:59:34.066667 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.066704 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 14:59:34 crc kubenswrapper[4840]: E1205 14:59:34.066858 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 14:59:34 crc kubenswrapper[4840]: E1205 14:59:34.067022 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.071269 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.071333 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.071363 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 14:59:34 crc kubenswrapper[4840]: E1205 14:59:34.071452 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 05 14:59:34 crc kubenswrapper[4840]: E1205 14:59:34.071480 4840 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 05 14:59:34 crc kubenswrapper[4840]: E1205 14:59:34.071491 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 05 14:59:34 crc kubenswrapper[4840]: E1205 14:59:34.071506 4840 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 05 14:59:34 crc kubenswrapper[4840]: E1205 14:59:34.071524 4840 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 14:59:34 crc kubenswrapper[4840]: E1205 14:59:34.071565 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 15:00:06.071544003 +0000 UTC m=+84.412606627 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 05 14:59:34 crc kubenswrapper[4840]: E1205 14:59:34.071592 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 15:00:06.071581585 +0000 UTC m=+84.412644209 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 05 14:59:34 crc kubenswrapper[4840]: E1205 14:59:34.071608 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 15:00:06.071600535 +0000 UTC m=+84.412663159 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.084894 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.084947 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.084959 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.084982 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.084995 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:34Z","lastTransitionTime":"2025-12-05T14:59:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.172678 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.172857 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 14:59:34 crc kubenswrapper[4840]: E1205 14:59:34.173041 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 05 14:59:34 crc kubenswrapper[4840]: E1205 14:59:34.173059 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 05 14:59:34 crc kubenswrapper[4840]: E1205 14:59:34.173072 4840 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 14:59:34 crc kubenswrapper[4840]: E1205 14:59:34.173081 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:00:06.173037412 +0000 UTC m=+84.514100056 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 14:59:34 crc kubenswrapper[4840]: E1205 14:59:34.173132 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 15:00:06.173115994 +0000 UTC m=+84.514178618 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.187979 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.188051 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.188068 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.188093 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.188116 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:34Z","lastTransitionTime":"2025-12-05T14:59:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.291123 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.291214 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.291251 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.291283 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.291309 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:34Z","lastTransitionTime":"2025-12-05T14:59:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.400347 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.400401 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.400414 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.400431 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.400441 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:34Z","lastTransitionTime":"2025-12-05T14:59:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.502514 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.502761 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.502897 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.502999 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.503069 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:34Z","lastTransitionTime":"2025-12-05T14:59:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.605246 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.605284 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.605296 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.605311 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.605324 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:34Z","lastTransitionTime":"2025-12-05T14:59:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.707915 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.708217 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.708402 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.708588 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.708717 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:34Z","lastTransitionTime":"2025-12-05T14:59:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.812915 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.812990 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.813003 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.813019 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.813033 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:34Z","lastTransitionTime":"2025-12-05T14:59:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.916283 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.916342 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.916356 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.916373 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:34 crc kubenswrapper[4840]: I1205 14:59:34.916384 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:34Z","lastTransitionTime":"2025-12-05T14:59:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.018963 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.018995 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.019004 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.019017 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.019025 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:35Z","lastTransitionTime":"2025-12-05T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.065732 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq"
Dec 05 14:59:35 crc kubenswrapper[4840]: E1205 14:59:35.066118 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.121366 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.121409 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.121423 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.121438 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.121452 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:35Z","lastTransitionTime":"2025-12-05T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.223354 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.223934 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.223955 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.223969 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.223978 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:35Z","lastTransitionTime":"2025-12-05T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.326740 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.326808 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.326823 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.326840 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.326851 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:35Z","lastTransitionTime":"2025-12-05T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.433602 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.433655 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.433671 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.433691 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.433703 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:35Z","lastTransitionTime":"2025-12-05T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.535958 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.536034 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.536048 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.536066 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.536079 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:35Z","lastTransitionTime":"2025-12-05T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.583593 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.583638 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.583648 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.583663 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.583674 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:35Z","lastTransitionTime":"2025-12-05T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:35 crc kubenswrapper[4840]: E1205 14:59:35.598062 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:35Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.601052 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.601086 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.601098 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.601113 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.601124 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:35Z","lastTransitionTime":"2025-12-05T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:35 crc kubenswrapper[4840]: E1205 14:59:35.611599 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:35Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.615186 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.615212 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.615222 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.615234 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.615244 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:35Z","lastTransitionTime":"2025-12-05T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:35 crc kubenswrapper[4840]: E1205 14:59:35.627153 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:35Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.630689 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.630720 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.630728 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.630742 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.630753 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:35Z","lastTransitionTime":"2025-12-05T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:35 crc kubenswrapper[4840]: E1205 14:59:35.641514 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:35Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.649456 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.649507 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.649520 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.649538 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.649549 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:35Z","lastTransitionTime":"2025-12-05T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:35 crc kubenswrapper[4840]: E1205 14:59:35.662610 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:35Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:35 crc kubenswrapper[4840]: E1205 14:59:35.662770 4840 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.664443 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.664493 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.664509 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.664528 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.664540 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:35Z","lastTransitionTime":"2025-12-05T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.767059 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.767310 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.767327 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.767347 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.767394 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:35Z","lastTransitionTime":"2025-12-05T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.869952 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.870002 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.870012 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.870026 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.870036 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:35Z","lastTransitionTime":"2025-12-05T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.975436 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.975534 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.975591 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.975620 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:35 crc kubenswrapper[4840]: I1205 14:59:35.975650 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:35Z","lastTransitionTime":"2025-12-05T14:59:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.066618 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.066681 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:36 crc kubenswrapper[4840]: E1205 14:59:36.066786 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.066851 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:36 crc kubenswrapper[4840]: E1205 14:59:36.066984 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 14:59:36 crc kubenswrapper[4840]: E1205 14:59:36.067056 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.078499 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.078544 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.078556 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.078575 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.078587 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:36Z","lastTransitionTime":"2025-12-05T14:59:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.182105 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.182163 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.182184 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.182213 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.182237 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:36Z","lastTransitionTime":"2025-12-05T14:59:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.286003 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.286072 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.286088 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.286106 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.286118 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:36Z","lastTransitionTime":"2025-12-05T14:59:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.389451 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.389933 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.390111 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.390272 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.390432 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:36Z","lastTransitionTime":"2025-12-05T14:59:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.493336 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.493370 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.493382 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.493406 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.493421 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:36Z","lastTransitionTime":"2025-12-05T14:59:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.595979 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.596021 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.596033 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.596049 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.596060 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:36Z","lastTransitionTime":"2025-12-05T14:59:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.699081 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.699136 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.699155 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.699179 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.699197 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:36Z","lastTransitionTime":"2025-12-05T14:59:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.809794 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.809834 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.809843 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.809859 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.809888 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:36Z","lastTransitionTime":"2025-12-05T14:59:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.912622 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.912658 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.912667 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.912690 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:36 crc kubenswrapper[4840]: I1205 14:59:36.912710 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:36Z","lastTransitionTime":"2025-12-05T14:59:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.016139 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.016181 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.016190 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.016205 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.016217 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:37Z","lastTransitionTime":"2025-12-05T14:59:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.066061 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:37 crc kubenswrapper[4840]: E1205 14:59:37.066317 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.122205 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.122283 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.122410 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.122442 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.122489 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:37Z","lastTransitionTime":"2025-12-05T14:59:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.224310 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.224368 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.224383 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.224402 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.224418 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:37Z","lastTransitionTime":"2025-12-05T14:59:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.327515 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.327563 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.327577 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.327594 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.327606 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:37Z","lastTransitionTime":"2025-12-05T14:59:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.429953 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.430012 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.430024 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.430041 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.430056 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:37Z","lastTransitionTime":"2025-12-05T14:59:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.532783 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.532832 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.532844 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.532889 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.532904 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:37Z","lastTransitionTime":"2025-12-05T14:59:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.634714 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.634747 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.634757 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.634771 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.634780 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:37Z","lastTransitionTime":"2025-12-05T14:59:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.737064 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.737109 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.737118 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.737133 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.737147 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:37Z","lastTransitionTime":"2025-12-05T14:59:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.839802 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.839976 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.839999 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.840025 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.840042 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:37Z","lastTransitionTime":"2025-12-05T14:59:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.943578 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.943672 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.943714 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.943745 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:37 crc kubenswrapper[4840]: I1205 14:59:37.943782 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:37Z","lastTransitionTime":"2025-12-05T14:59:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.046302 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.046346 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.046364 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.046386 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.046407 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:38Z","lastTransitionTime":"2025-12-05T14:59:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.066082 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:38 crc kubenswrapper[4840]: E1205 14:59:38.066252 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.066495 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:38 crc kubenswrapper[4840]: E1205 14:59:38.066594 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.066820 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:38 crc kubenswrapper[4840]: E1205 14:59:38.066962 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.149769 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.149801 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.149809 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.149821 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.149830 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:38Z","lastTransitionTime":"2025-12-05T14:59:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.253299 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.253335 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.253345 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.253377 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.253387 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:38Z","lastTransitionTime":"2025-12-05T14:59:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.356332 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.356465 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.356495 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.356528 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.356550 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:38Z","lastTransitionTime":"2025-12-05T14:59:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.459553 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.459625 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.459642 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.459667 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.459683 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:38Z","lastTransitionTime":"2025-12-05T14:59:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.562015 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.562078 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.562089 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.562105 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.562114 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:38Z","lastTransitionTime":"2025-12-05T14:59:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.664369 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.664410 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.664421 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.664436 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.664459 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:38Z","lastTransitionTime":"2025-12-05T14:59:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.767672 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.767715 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.767731 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.767759 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.767776 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:38Z","lastTransitionTime":"2025-12-05T14:59:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.870765 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.870812 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.870828 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.870848 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.870885 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:38Z","lastTransitionTime":"2025-12-05T14:59:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.973892 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.973952 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.973965 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.973985 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:38 crc kubenswrapper[4840]: I1205 14:59:38.974000 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:38Z","lastTransitionTime":"2025-12-05T14:59:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.066335 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:39 crc kubenswrapper[4840]: E1205 14:59:39.066527 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.076669 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.076702 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.076714 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.076729 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.076740 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:39Z","lastTransitionTime":"2025-12-05T14:59:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.178364 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.178398 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.178406 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.178418 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.178428 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:39Z","lastTransitionTime":"2025-12-05T14:59:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.280633 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.280676 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.280688 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.280703 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.280715 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:39Z","lastTransitionTime":"2025-12-05T14:59:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.383567 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.383652 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.383664 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.383679 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.383692 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:39Z","lastTransitionTime":"2025-12-05T14:59:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.485727 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.485760 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.485768 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.485782 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.485791 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:39Z","lastTransitionTime":"2025-12-05T14:59:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.588710 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.588748 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.588761 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.588777 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.588787 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:39Z","lastTransitionTime":"2025-12-05T14:59:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.691901 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.692004 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.692041 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.692072 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.692095 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:39Z","lastTransitionTime":"2025-12-05T14:59:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.794784 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.794825 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.794834 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.794849 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.794886 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:39Z","lastTransitionTime":"2025-12-05T14:59:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.897392 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.897461 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.897483 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.897511 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.897536 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:39Z","lastTransitionTime":"2025-12-05T14:59:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:39 crc kubenswrapper[4840]: I1205 14:59:39.999928 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.000226 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.000349 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.000461 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.000574 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:40Z","lastTransitionTime":"2025-12-05T14:59:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.066622 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.066617 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.066695 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:40 crc kubenswrapper[4840]: E1205 14:59:40.067373 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 14:59:40 crc kubenswrapper[4840]: E1205 14:59:40.067544 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 14:59:40 crc kubenswrapper[4840]: E1205 14:59:40.067821 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.103632 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.103677 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.103690 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.103705 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.103715 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:40Z","lastTransitionTime":"2025-12-05T14:59:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.206884 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.206934 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.206946 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.206964 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.206977 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:40Z","lastTransitionTime":"2025-12-05T14:59:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.309828 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.309920 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.309941 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.309959 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.309973 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:40Z","lastTransitionTime":"2025-12-05T14:59:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.413568 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.413627 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.413650 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.413673 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.413688 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:40Z","lastTransitionTime":"2025-12-05T14:59:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.515123 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.515158 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.515169 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.515188 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.515201 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:40Z","lastTransitionTime":"2025-12-05T14:59:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.619370 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.619417 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.619426 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.619449 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.619467 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:40Z","lastTransitionTime":"2025-12-05T14:59:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.722072 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.722113 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.722122 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.722137 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.722147 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:40Z","lastTransitionTime":"2025-12-05T14:59:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.824678 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.824724 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.824735 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.824754 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.824765 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:40Z","lastTransitionTime":"2025-12-05T14:59:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.926934 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.926996 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.927009 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.927026 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:40 crc kubenswrapper[4840]: I1205 14:59:40.927038 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:40Z","lastTransitionTime":"2025-12-05T14:59:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.029326 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.029367 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.029377 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.029394 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.029403 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:41Z","lastTransitionTime":"2025-12-05T14:59:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.065815 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:41 crc kubenswrapper[4840]: E1205 14:59:41.065973 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.131822 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.131895 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.131908 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.131925 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.131936 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:41Z","lastTransitionTime":"2025-12-05T14:59:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.233928 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.233984 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.233997 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.234016 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.234028 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:41Z","lastTransitionTime":"2025-12-05T14:59:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.336670 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.336719 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.336730 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.336746 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.336761 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:41Z","lastTransitionTime":"2025-12-05T14:59:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.439442 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.439490 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.439504 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.439537 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.439549 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:41Z","lastTransitionTime":"2025-12-05T14:59:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.542810 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.542922 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.542951 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.542982 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.543004 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:41Z","lastTransitionTime":"2025-12-05T14:59:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.646564 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.646621 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.646642 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.646664 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.646681 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:41Z","lastTransitionTime":"2025-12-05T14:59:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.750072 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.750139 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.750158 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.750184 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.750204 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:41Z","lastTransitionTime":"2025-12-05T14:59:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.853097 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.853162 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.853182 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.853206 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.853225 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:41Z","lastTransitionTime":"2025-12-05T14:59:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.955954 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.956013 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.956030 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.956049 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:41 crc kubenswrapper[4840]: I1205 14:59:41.956064 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:41Z","lastTransitionTime":"2025-12-05T14:59:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.058441 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.058531 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.058555 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.058586 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.058609 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:42Z","lastTransitionTime":"2025-12-05T14:59:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.065768 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.065820 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.065787 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:42 crc kubenswrapper[4840]: E1205 14:59:42.066032 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 14:59:42 crc kubenswrapper[4840]: E1205 14:59:42.066114 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 14:59:42 crc kubenswrapper[4840]: E1205 14:59:42.066183 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.085763 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.100651 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.139853 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.151819 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.160194 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.160220 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.160229 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.160243 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.160253 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:42Z","lastTransitionTime":"2025-12-05T14:59:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.184310 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.208712 4840 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.222353 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d45af504d931633ab2ddeb4f17089aac673eb4138e1fc5aea7024564ff5836ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3f4003c17203d33aa7ebf592c1131f76243cad6db0fe14120c2b692bbc6077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tszn5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 05 
14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.232478 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gn7qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5cf5212-af00-4788-ad5f-ff824fea7c0f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gn7qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.244508 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.255008 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.262095 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.262155 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.262168 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.262186 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.262221 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:42Z","lastTransitionTime":"2025-12-05T14:59:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.265245 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.273771 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.283685 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3032bf4d-2317-4cc8-9117-9cb879a42db0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c057d4b05d7f3d5366be3d427eb21f50da128fafa496ba450352080fe1e93108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7586294d68d4dc8b5e9fd6ed9c807d9086d3c5b5690583b353e966af65b7ec2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb83899c7f1ab6ad9c37b9a9760c060049c4868ed04fcd1939553e569d05db12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.294241 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.307566 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.327073 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02ce9423f51dd0bad2b3d0afef0cea370294f79d
55fc2ede0bfb11fce61493da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02ce9423f51dd0bad2b3d0afef0cea370294f79d55fc2ede0bfb11fce61493da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:31Z\\\",\\\"message\\\":\\\"-kubernetes/ovnkube-node-czvxk in node crc\\\\nI1205 14:59:31.532223 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-czvxk after 0 failed attempt(s)\\\\nI1205 14:59:31.532234 6498 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-czvxk\\\\nI1205 14:59:31.530225 6498 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5\\\\nI1205 14:59:31.532257 6498 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5 in node crc\\\\nI1205 14:59:31.532266 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5 after 0 failed attempt(s)\\\\nI1205 14:59:31.532275 6498 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5\\\\nF1205 14:59:31.530611 6498 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to star\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-czvxk_openshift-ovn-kubernetes(e9a432c2-725d-46c6-963e-68a99ba35c89)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.340173 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:42Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.363953 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.364003 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.364019 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.364041 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.364057 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:42Z","lastTransitionTime":"2025-12-05T14:59:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.465646 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.465679 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.465688 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.465700 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.465709 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:42Z","lastTransitionTime":"2025-12-05T14:59:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.568352 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.568398 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.568410 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.568428 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.568441 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:42Z","lastTransitionTime":"2025-12-05T14:59:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.671135 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.671458 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.671561 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.671666 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.671762 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:42Z","lastTransitionTime":"2025-12-05T14:59:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.774666 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.774735 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.774746 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.774762 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.774771 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:42Z","lastTransitionTime":"2025-12-05T14:59:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.877938 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.878343 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.878445 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.878550 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.878637 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:42Z","lastTransitionTime":"2025-12-05T14:59:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.980971 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.981014 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.981024 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.981038 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:42 crc kubenswrapper[4840]: I1205 14:59:42.981048 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:42Z","lastTransitionTime":"2025-12-05T14:59:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.066750 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:43 crc kubenswrapper[4840]: E1205 14:59:43.067007 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.084703 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.084766 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.084782 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.084800 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.084810 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:43Z","lastTransitionTime":"2025-12-05T14:59:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.187272 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.187306 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.187316 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.187329 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.187337 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:43Z","lastTransitionTime":"2025-12-05T14:59:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.291577 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.291669 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.291687 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.291716 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.291732 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:43Z","lastTransitionTime":"2025-12-05T14:59:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.393799 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.393850 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.393860 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.393891 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.393903 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:43Z","lastTransitionTime":"2025-12-05T14:59:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.496436 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.496487 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.496502 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.496520 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.496531 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:43Z","lastTransitionTime":"2025-12-05T14:59:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.599222 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.599304 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.599330 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.599379 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.599404 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:43Z","lastTransitionTime":"2025-12-05T14:59:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.702382 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.702439 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.702452 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.702476 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.702488 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:43Z","lastTransitionTime":"2025-12-05T14:59:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.805379 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.805423 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.805434 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.805449 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.805460 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:43Z","lastTransitionTime":"2025-12-05T14:59:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.907465 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.907503 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.907513 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.907528 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:43 crc kubenswrapper[4840]: I1205 14:59:43.907539 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:43Z","lastTransitionTime":"2025-12-05T14:59:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.010065 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.010101 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.010109 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.010123 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.010134 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:44Z","lastTransitionTime":"2025-12-05T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.067932 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:44 crc kubenswrapper[4840]: E1205 14:59:44.068076 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.068248 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:44 crc kubenswrapper[4840]: E1205 14:59:44.068304 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.068428 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:44 crc kubenswrapper[4840]: E1205 14:59:44.068479 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.112736 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.112773 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.112782 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.112797 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.112831 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:44Z","lastTransitionTime":"2025-12-05T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.231634 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.231686 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.231702 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.231726 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.231745 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:44Z","lastTransitionTime":"2025-12-05T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.334728 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.334773 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.334783 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.334801 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.334811 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:44Z","lastTransitionTime":"2025-12-05T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.437589 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.437898 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.437996 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.438138 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.438238 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:44Z","lastTransitionTime":"2025-12-05T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.541144 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.541182 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.541191 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.541204 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.541213 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:44Z","lastTransitionTime":"2025-12-05T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.643567 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.644027 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.644208 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.644367 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.644508 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:44Z","lastTransitionTime":"2025-12-05T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.747370 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.747438 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.747472 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.747502 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.747526 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:44Z","lastTransitionTime":"2025-12-05T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.850406 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.850455 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.850464 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.850482 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.850493 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:44Z","lastTransitionTime":"2025-12-05T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.952754 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.952850 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.952883 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.952906 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:44 crc kubenswrapper[4840]: I1205 14:59:44.952923 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:44Z","lastTransitionTime":"2025-12-05T14:59:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.055524 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.055569 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.055581 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.055600 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.055612 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:45Z","lastTransitionTime":"2025-12-05T14:59:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.065905 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:45 crc kubenswrapper[4840]: E1205 14:59:45.066081 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.158101 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.158132 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.158141 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.158152 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.158162 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:45Z","lastTransitionTime":"2025-12-05T14:59:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.260895 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.261026 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.261047 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.261072 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.261090 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:45Z","lastTransitionTime":"2025-12-05T14:59:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.363998 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.364046 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.364063 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.364086 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.364102 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:45Z","lastTransitionTime":"2025-12-05T14:59:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.466403 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.466432 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.466440 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.466451 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.466459 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:45Z","lastTransitionTime":"2025-12-05T14:59:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.568615 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.568644 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.568653 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.568667 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.568675 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:45Z","lastTransitionTime":"2025-12-05T14:59:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.671250 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.671283 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.671292 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.671307 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.671316 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:45Z","lastTransitionTime":"2025-12-05T14:59:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.773909 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.773946 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.773962 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.773983 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.773996 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:45Z","lastTransitionTime":"2025-12-05T14:59:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.822610 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.822658 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.822671 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.822690 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.822703 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:45Z","lastTransitionTime":"2025-12-05T14:59:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:45 crc kubenswrapper[4840]: E1205 14:59:45.838993 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:45Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.846662 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.846700 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.846736 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.846815 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.846829 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:45Z","lastTransitionTime":"2025-12-05T14:59:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:45 crc kubenswrapper[4840]: E1205 14:59:45.862233 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:45Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.866352 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.866386 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.866396 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.866410 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.866419 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:45Z","lastTransitionTime":"2025-12-05T14:59:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:45 crc kubenswrapper[4840]: E1205 14:59:45.877341 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:45Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:45 crc kubenswrapper[4840]: E1205 14:59:45.906894 4840 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.908320 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.908367 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.908380 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.908397 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:45 crc kubenswrapper[4840]: I1205 14:59:45.908407 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:45Z","lastTransitionTime":"2025-12-05T14:59:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.011556 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.011640 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.011670 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.011698 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.011721 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:46Z","lastTransitionTime":"2025-12-05T14:59:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.066276 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.066313 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:46 crc kubenswrapper[4840]: E1205 14:59:46.066426 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.066444 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:46 crc kubenswrapper[4840]: E1205 14:59:46.066569 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 14:59:46 crc kubenswrapper[4840]: E1205 14:59:46.066686 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.114012 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.114048 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.114058 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.114074 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.114085 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:46Z","lastTransitionTime":"2025-12-05T14:59:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.217181 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.217244 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.217255 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.217276 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.217289 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:46Z","lastTransitionTime":"2025-12-05T14:59:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.319352 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.319395 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.319407 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.319423 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.319433 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:46Z","lastTransitionTime":"2025-12-05T14:59:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.421408 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.421461 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.421473 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.421490 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.421502 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:46Z","lastTransitionTime":"2025-12-05T14:59:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.523075 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.523139 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.523148 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.523160 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.523169 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:46Z","lastTransitionTime":"2025-12-05T14:59:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.624799 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.624892 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.624914 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.624934 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.624948 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:46Z","lastTransitionTime":"2025-12-05T14:59:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.727808 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.727848 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.727859 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.727888 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.727901 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:46Z","lastTransitionTime":"2025-12-05T14:59:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.829975 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.830030 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.830041 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.830053 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.830062 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:46Z","lastTransitionTime":"2025-12-05T14:59:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.932625 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.932658 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.932668 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.932683 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:46 crc kubenswrapper[4840]: I1205 14:59:46.932692 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:46Z","lastTransitionTime":"2025-12-05T14:59:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.035470 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.035528 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.035543 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.035566 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.035580 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:47Z","lastTransitionTime":"2025-12-05T14:59:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.065920 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq"
Dec 05 14:59:47 crc kubenswrapper[4840]: E1205 14:59:47.066087 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.138028 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.138067 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.138078 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.138096 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.138108 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:47Z","lastTransitionTime":"2025-12-05T14:59:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.240244 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.240295 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.240317 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.240338 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.240351 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:47Z","lastTransitionTime":"2025-12-05T14:59:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.342607 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.342675 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.342692 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.342716 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.342733 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:47Z","lastTransitionTime":"2025-12-05T14:59:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.444709 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.444780 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.444795 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.444813 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.444851 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:47Z","lastTransitionTime":"2025-12-05T14:59:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.509498 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs\") pod \"network-metrics-daemon-gn7qq\" (UID: \"f5cf5212-af00-4788-ad5f-ff824fea7c0f\") " pod="openshift-multus/network-metrics-daemon-gn7qq"
Dec 05 14:59:47 crc kubenswrapper[4840]: E1205 14:59:47.509656 4840 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 14:59:47 crc kubenswrapper[4840]: E1205 14:59:47.509726 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs podName:f5cf5212-af00-4788-ad5f-ff824fea7c0f nodeName:}" failed. No retries permitted until 2025-12-05 15:00:19.509713777 +0000 UTC m=+97.850776391 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs") pod "network-metrics-daemon-gn7qq" (UID: "f5cf5212-af00-4788-ad5f-ff824fea7c0f") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.547792 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.547897 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.547916 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.547940 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.547958 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:47Z","lastTransitionTime":"2025-12-05T14:59:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.650254 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.650358 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.650373 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.650392 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.650402 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:47Z","lastTransitionTime":"2025-12-05T14:59:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.753811 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.753945 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.753966 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.753997 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.754015 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:47Z","lastTransitionTime":"2025-12-05T14:59:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.856697 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.856737 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.856745 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.856760 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.856768 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:47Z","lastTransitionTime":"2025-12-05T14:59:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.958962 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.959003 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.959014 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.959031 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:47 crc kubenswrapper[4840]: I1205 14:59:47.959042 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:47Z","lastTransitionTime":"2025-12-05T14:59:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.061603 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.061675 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.061698 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.061726 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.061748 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:48Z","lastTransitionTime":"2025-12-05T14:59:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.066448 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.066507 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.066458 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 14:59:48 crc kubenswrapper[4840]: E1205 14:59:48.066606 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 14:59:48 crc kubenswrapper[4840]: E1205 14:59:48.066766 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 14:59:48 crc kubenswrapper[4840]: E1205 14:59:48.066905 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.164524 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.164574 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.164585 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.164601 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.164616 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:48Z","lastTransitionTime":"2025-12-05T14:59:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.267086 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.267126 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.267135 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.267149 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.267158 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:48Z","lastTransitionTime":"2025-12-05T14:59:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.368938 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.369056 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.369118 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.369141 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.369156 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:48Z","lastTransitionTime":"2025-12-05T14:59:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.472057 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.472107 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.472121 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.472159 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.472171 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:48Z","lastTransitionTime":"2025-12-05T14:59:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.574412 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.574461 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.574469 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.574484 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.574495 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:48Z","lastTransitionTime":"2025-12-05T14:59:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.676453 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.676487 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.676495 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.676508 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.676518 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:48Z","lastTransitionTime":"2025-12-05T14:59:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.778967 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.779006 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.779018 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.779035 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.779047 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:48Z","lastTransitionTime":"2025-12-05T14:59:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.881716 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.881767 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.881779 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.881796 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.881808 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:48Z","lastTransitionTime":"2025-12-05T14:59:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.984446 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.984482 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.984512 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.984528 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:48 crc kubenswrapper[4840]: I1205 14:59:48.984542 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:48Z","lastTransitionTime":"2025-12-05T14:59:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.066600 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq"
Dec 05 14:59:49 crc kubenswrapper[4840]: E1205 14:59:49.066738 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.067895 4840 scope.go:117] "RemoveContainer" containerID="02ce9423f51dd0bad2b3d0afef0cea370294f79d55fc2ede0bfb11fce61493da"
Dec 05 14:59:49 crc kubenswrapper[4840]: E1205 14:59:49.069042 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-czvxk_openshift-ovn-kubernetes(e9a432c2-725d-46c6-963e-68a99ba35c89)\"" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.086767 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.086793 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.086802 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.086814 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.086822 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:49Z","lastTransitionTime":"2025-12-05T14:59:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.189385 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.189442 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.189455 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.189476 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.189491 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:49Z","lastTransitionTime":"2025-12-05T14:59:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.292751 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.292814 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.292830 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.292853 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.292917 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:49Z","lastTransitionTime":"2025-12-05T14:59:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.395623 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.395685 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.395701 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.395725 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.395743 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:49Z","lastTransitionTime":"2025-12-05T14:59:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.499473 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.499520 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.499534 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.499559 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.499572 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:49Z","lastTransitionTime":"2025-12-05T14:59:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.602786 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.603183 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.603193 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.603210 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.603221 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:49Z","lastTransitionTime":"2025-12-05T14:59:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.705100 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.705151 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.705165 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.705185 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.705197 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:49Z","lastTransitionTime":"2025-12-05T14:59:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.807286 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.807339 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.807349 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.807364 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.807374 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:49Z","lastTransitionTime":"2025-12-05T14:59:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.909391 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.909461 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.909485 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.909515 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:49 crc kubenswrapper[4840]: I1205 14:59:49.909536 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:49Z","lastTransitionTime":"2025-12-05T14:59:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.012337 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.012395 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.012410 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.012431 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.012447 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:50Z","lastTransitionTime":"2025-12-05T14:59:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.065902 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.065965 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.065965 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 14:59:50 crc kubenswrapper[4840]: E1205 14:59:50.066041 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 14:59:50 crc kubenswrapper[4840]: E1205 14:59:50.066198 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 14:59:50 crc kubenswrapper[4840]: E1205 14:59:50.066253 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.115243 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.115302 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.115327 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.115349 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.115365 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:50Z","lastTransitionTime":"2025-12-05T14:59:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.218939 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.219012 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.219034 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.219067 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.219090 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:50Z","lastTransitionTime":"2025-12-05T14:59:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.322418 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.322462 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.322477 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.322493 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.322504 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:50Z","lastTransitionTime":"2025-12-05T14:59:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.424443 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.424513 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.424530 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.424546 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.424597 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:50Z","lastTransitionTime":"2025-12-05T14:59:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.527515 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.527551 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.527560 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.527573 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.527582 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:50Z","lastTransitionTime":"2025-12-05T14:59:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.533648 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-q8pn7_ffd91a64-4156-418d-8348-1efa3563e904/kube-multus/0.log"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.533718 4840 generic.go:334] "Generic (PLEG): container finished" podID="ffd91a64-4156-418d-8348-1efa3563e904" containerID="c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691" exitCode=1
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.533748 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-q8pn7" event={"ID":"ffd91a64-4156-418d-8348-1efa3563e904","Type":"ContainerDied","Data":"c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691"}
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.534498 4840 scope.go:117] "RemoveContainer" containerID="c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.549658 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:50Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.569304 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:50Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.580538 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:50Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.594012 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:50Z is after 2025-08-24T17:21:41Z"
Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.618181 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:50Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.629623 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-05T14:59:50Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.630446 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.630484 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.630501 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.630522 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.630539 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:50Z","lastTransitionTime":"2025-12-05T14:59:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.640279 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d45af504d931633ab2ddeb4f17089aac673eb4138e1fc5aea7024564ff5836ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3f4003c17203d33aa7ebf592c1131f76243cad6db0fe14120c2b692bbc6077\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tszn5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:50Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.648671 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gn7qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5cf5212-af00-4788-ad5f-ff824fea7c0f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gn7qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:50Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.659774 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:50Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.669622 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:50Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.677570 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T14:59:50Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.689391 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\
\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:50Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.700832 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:50Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.717686 4840 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:50Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.733737 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.733783 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:50 
crc kubenswrapper[4840]: I1205 14:59:50.733800 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.733823 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.733840 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:50Z","lastTransitionTime":"2025-12-05T14:59:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.738178 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02ce9423f51dd0bad2b3d0afef0cea370294f79d
55fc2ede0bfb11fce61493da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02ce9423f51dd0bad2b3d0afef0cea370294f79d55fc2ede0bfb11fce61493da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:31Z\\\",\\\"message\\\":\\\"-kubernetes/ovnkube-node-czvxk in node crc\\\\nI1205 14:59:31.532223 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-czvxk after 0 failed attempt(s)\\\\nI1205 14:59:31.532234 6498 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-czvxk\\\\nI1205 14:59:31.530225 6498 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5\\\\nI1205 14:59:31.532257 6498 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5 in node crc\\\\nI1205 14:59:31.532266 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5 after 0 failed attempt(s)\\\\nI1205 14:59:31.532275 6498 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5\\\\nF1205 14:59:31.530611 6498 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to star\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-czvxk_openshift-ovn-kubernetes(e9a432c2-725d-46c6-963e-68a99ba35c89)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:50Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.750345 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:49Z\\\",\\\"message\\\":\\\"2025-12-05T14:59:03+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3df2e96f-ba4e-4dae-bc72-0304b6d7c01f\\\\n2025-12-05T14:59:03+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3df2e96f-ba4e-4dae-bc72-0304b6d7c01f to /host/opt/cni/bin/\\\\n2025-12-05T14:59:04Z [verbose] multus-daemon started\\\\n2025-12-05T14:59:04Z [verbose] Readiness Indicator file check\\\\n2025-12-05T14:59:49Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:50Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.761408 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3032bf4d-2317-4cc8-9117-9cb879a42db0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c057d4b05d7f3d5366be3d427eb21f50da128fafa496ba450352080fe1e93108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7586294d68d4dc8b5e9fd6ed9c807d9086d3c5b5690583b353e966af65b7ec2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb83899c7f1ab6ad9c37b9a9760c060049c4868ed04fcd1939553e569d05db12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:50Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.836340 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.836373 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.836382 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.836396 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.836407 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:50Z","lastTransitionTime":"2025-12-05T14:59:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.938636 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.938673 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.938684 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.938699 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:50 crc kubenswrapper[4840]: I1205 14:59:50.938709 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:50Z","lastTransitionTime":"2025-12-05T14:59:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.041060 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.041098 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.041110 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.041126 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.041137 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:51Z","lastTransitionTime":"2025-12-05T14:59:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.065711 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:51 crc kubenswrapper[4840]: E1205 14:59:51.065896 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.144195 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.144235 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.144244 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.144257 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.144267 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:51Z","lastTransitionTime":"2025-12-05T14:59:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.247201 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.247246 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.247262 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.247284 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.247296 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:51Z","lastTransitionTime":"2025-12-05T14:59:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.350133 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.350176 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.350192 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.350212 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.350228 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:51Z","lastTransitionTime":"2025-12-05T14:59:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.452690 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.452723 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.452732 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.452745 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.452755 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:51Z","lastTransitionTime":"2025-12-05T14:59:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.539219 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-q8pn7_ffd91a64-4156-418d-8348-1efa3563e904/kube-multus/0.log" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.539286 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-q8pn7" event={"ID":"ffd91a64-4156-418d-8348-1efa3563e904","Type":"ContainerStarted","Data":"ddb0c1f54c32a87c863028965a174607b182b1a1ae7e681045ad724e50e0d7d8"} Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.555112 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.555152 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.555161 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.555178 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.555188 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:51Z","lastTransitionTime":"2025-12-05T14:59:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.558943 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:51Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.579999 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:51Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.591444 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-05T14:59:51Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.602471 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d45af504d931633ab2ddeb4f17089aac673eb4138e1fc5aea7024564ff5836ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3f4003c17203d33aa7ebf592c1131f76243cad6db0fe14120c2b692bbc6077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tszn5\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:51Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.615369 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:51Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.635608 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:51Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.645631 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:51Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.653921 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gn7qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5cf5212-af00-4788-ad5f-ff824fea7c0f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gn7qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:51Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.657401 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.657427 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.657435 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.657450 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.657460 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:51Z","lastTransitionTime":"2025-12-05T14:59:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.665000 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:51Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.678993 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:51Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.689540 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:51Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.698247 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T14:59:51Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.714522 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ddb0c1f54c32a87c863028965a174607b182b1a1ae7e681045ad724e50e0d7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:49Z\\\",\\\"message\\\":\\\"2025-12-05T14:59:03+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3df2e96f-ba4e-4dae-bc72-0304b6d7c01f\\\\n2025-12-05T14:59:03+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3df2e96f-ba4e-4dae-bc72-0304b6d7c01f to /host/opt/cni/bin/\\\\n2025-12-05T14:59:04Z [verbose] multus-daemon started\\\\n2025-12-05T14:59:04Z [verbose] Readiness Indicator file check\\\\n2025-12-05T14:59:49Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:51Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.725898 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3032bf4d-2317-4cc8-9117-9cb879a42db0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c057d4b05d7f3d5366be3d427eb21f50da128fafa496ba450352080fe1e93108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7586294d68d4dc8b5e9fd6ed9c807d9086d3c5b5690583b353e966af65b7ec2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb83899c7f1ab6ad9c37b9a9760c060049c4868ed04fcd1939553e569d05db12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:51Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.735364 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:51Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.747773 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:51Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.759436 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.759470 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.759479 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.759494 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.759503 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:51Z","lastTransitionTime":"2025-12-05T14:59:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.772295 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02ce9423f51dd0bad2b3d0afef0cea370294f79d55fc2ede0bfb11fce61493da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02ce9423f51dd0bad2b3d0afef0cea370294f79d55fc2ede0bfb11fce61493da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:31Z\\\",\\\"message\\\":\\\"-kubernetes/ovnkube-node-czvxk in node crc\\\\nI1205 14:59:31.532223 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-czvxk after 0 failed attempt(s)\\\\nI1205 14:59:31.532234 6498 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-czvxk\\\\nI1205 14:59:31.530225 6498 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5\\\\nI1205 14:59:31.532257 6498 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5 in node crc\\\\nI1205 14:59:31.532266 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5 after 0 failed attempt(s)\\\\nI1205 14:59:31.532275 6498 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5\\\\nF1205 14:59:31.530611 6498 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to star\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-czvxk_openshift-ovn-kubernetes(e9a432c2-725d-46c6-963e-68a99ba35c89)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:51Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.861634 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.861675 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.861687 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.861704 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.861717 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:51Z","lastTransitionTime":"2025-12-05T14:59:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.965258 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.965303 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.965315 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.965332 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:51 crc kubenswrapper[4840]: I1205 14:59:51.965343 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:51Z","lastTransitionTime":"2025-12-05T14:59:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.066465 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.066538 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:52 crc kubenswrapper[4840]: E1205 14:59:52.066613 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 14:59:52 crc kubenswrapper[4840]: E1205 14:59:52.066746 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.066847 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:52 crc kubenswrapper[4840]: E1205 14:59:52.067117 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.068069 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.068094 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.068102 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.068116 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.068127 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:52Z","lastTransitionTime":"2025-12-05T14:59:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.077150 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d45af504d931633ab2ddeb4f17089aac673eb4138e1fc5aea7024564ff5836ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3f4003c17203d33aa7ebf592c1131f76243cad6db0fe14120c2b692bbc6077\\\",\\\"ima
ge\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tszn5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.093358 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.104511 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.114536 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.129063 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.149348 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.158411 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-05T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.168398 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gn7qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5cf5212-af00-4788-ad5f-ff824fea7c0f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gn7qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.169567 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:52 crc kubenswrapper[4840]: 
I1205 14:59:52.169700 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.169769 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.169856 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.169964 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:52Z","lastTransitionTime":"2025-12-05T14:59:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.182360 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":
\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 
14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.197096 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.209879 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.221669 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.234432 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3032bf4d-2317-4cc8-9117-9cb879a42db0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c057d4b05d7f3d5366be3d427eb21f50da128fafa496ba450352080fe1e93108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7586294d68d4dc8b5e9fd6ed9c807d9086d3c5b5690583b353e966af65b7ec2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb83899c7f1ab6ad9c37b9a9760c060049c4868ed04fcd1939553e569d05db12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\
\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.244734 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"nam
e\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.256287 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.272217 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.272248 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.272259 4840 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.272273 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.272285 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:52Z","lastTransitionTime":"2025-12-05T14:59:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.275628 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02ce9423f51dd0bad2b3d0afef0cea370294f79d
55fc2ede0bfb11fce61493da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02ce9423f51dd0bad2b3d0afef0cea370294f79d55fc2ede0bfb11fce61493da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:31Z\\\",\\\"message\\\":\\\"-kubernetes/ovnkube-node-czvxk in node crc\\\\nI1205 14:59:31.532223 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-czvxk after 0 failed attempt(s)\\\\nI1205 14:59:31.532234 6498 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-czvxk\\\\nI1205 14:59:31.530225 6498 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5\\\\nI1205 14:59:31.532257 6498 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5 in node crc\\\\nI1205 14:59:31.532266 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5 after 0 failed attempt(s)\\\\nI1205 14:59:31.532275 6498 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5\\\\nF1205 14:59:31.530611 6498 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to star\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-czvxk_openshift-ovn-kubernetes(e9a432c2-725d-46c6-963e-68a99ba35c89)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.289269 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ddb0c1f54c32a87c863028965a174607b182b1a1ae7e681045ad724e50e0d7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:49Z\\\",\\\"message\\\":\\\"2025-12-05T14:59:03+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3df2e96f-ba4e-4dae-bc72-0304b6d7c01f\\\\n2025-12-05T14:59:03+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3df2e96f-ba4e-4dae-bc72-0304b6d7c01f to 
/host/opt/cni/bin/\\\\n2025-12-05T14:59:04Z [verbose] multus-daemon started\\\\n2025-12-05T14:59:04Z [verbose] Readiness Indicator file check\\\\n2025-12-05T14:59:49Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:52Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.375338 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.375384 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.375402 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.375421 4840 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.375431 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:52Z","lastTransitionTime":"2025-12-05T14:59:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.477824 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.477886 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.477894 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.477907 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.477916 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:52Z","lastTransitionTime":"2025-12-05T14:59:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.585015 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.585060 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.585071 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.585087 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.585099 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:52Z","lastTransitionTime":"2025-12-05T14:59:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.686809 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.686853 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.686862 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.686894 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.686907 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:52Z","lastTransitionTime":"2025-12-05T14:59:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.790033 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.790074 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.790082 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.790101 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.790110 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:52Z","lastTransitionTime":"2025-12-05T14:59:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.893278 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.893320 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.893329 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.893344 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.893357 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:52Z","lastTransitionTime":"2025-12-05T14:59:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.996087 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.996679 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.996694 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.996709 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:52 crc kubenswrapper[4840]: I1205 14:59:52.996719 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:52Z","lastTransitionTime":"2025-12-05T14:59:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.065830 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:53 crc kubenswrapper[4840]: E1205 14:59:53.065986 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.076438 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.099259 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.099302 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.099313 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.099331 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.099351 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:53Z","lastTransitionTime":"2025-12-05T14:59:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.202202 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.202233 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.202241 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.202255 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.202264 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:53Z","lastTransitionTime":"2025-12-05T14:59:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.304902 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.304932 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.304941 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.304955 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.304963 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:53Z","lastTransitionTime":"2025-12-05T14:59:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.407630 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.407673 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.407700 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.407723 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.407735 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:53Z","lastTransitionTime":"2025-12-05T14:59:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.510088 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.510143 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.510161 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.510183 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.510202 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:53Z","lastTransitionTime":"2025-12-05T14:59:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.612920 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.612962 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.612973 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.612998 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.613010 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:53Z","lastTransitionTime":"2025-12-05T14:59:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.715369 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.715453 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.715468 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.715500 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.715514 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:53Z","lastTransitionTime":"2025-12-05T14:59:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.817394 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.817463 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.817477 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.817497 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.817509 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:53Z","lastTransitionTime":"2025-12-05T14:59:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.920636 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.920697 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.920715 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.920739 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:53 crc kubenswrapper[4840]: I1205 14:59:53.920756 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:53Z","lastTransitionTime":"2025-12-05T14:59:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.022945 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.022972 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.022980 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.022993 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.023002 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:54Z","lastTransitionTime":"2025-12-05T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.066151 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.066199 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:54 crc kubenswrapper[4840]: E1205 14:59:54.066285 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.066154 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:54 crc kubenswrapper[4840]: E1205 14:59:54.066516 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 14:59:54 crc kubenswrapper[4840]: E1205 14:59:54.066630 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.125263 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.125308 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.125322 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.125341 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.125356 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:54Z","lastTransitionTime":"2025-12-05T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.228774 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.228929 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.228954 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.228982 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.229001 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:54Z","lastTransitionTime":"2025-12-05T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.332183 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.332223 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.332233 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.332251 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.332263 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:54Z","lastTransitionTime":"2025-12-05T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.434704 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.434743 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.434754 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.434770 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.434782 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:54Z","lastTransitionTime":"2025-12-05T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.537374 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.537412 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.537422 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.537436 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.537445 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:54Z","lastTransitionTime":"2025-12-05T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.639347 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.639391 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.639467 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.639490 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.639505 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:54Z","lastTransitionTime":"2025-12-05T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.741893 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.741946 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.741958 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.741979 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.741993 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:54Z","lastTransitionTime":"2025-12-05T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.844497 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.844532 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.844540 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.844554 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.844568 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:54Z","lastTransitionTime":"2025-12-05T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.953023 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.953087 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.953120 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.953148 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:54 crc kubenswrapper[4840]: I1205 14:59:54.953168 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:54Z","lastTransitionTime":"2025-12-05T14:59:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.056723 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.056797 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.056820 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.056907 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.056956 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:55Z","lastTransitionTime":"2025-12-05T14:59:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.066102 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:55 crc kubenswrapper[4840]: E1205 14:59:55.066300 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.159989 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.160041 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.160053 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.160071 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.160084 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:55Z","lastTransitionTime":"2025-12-05T14:59:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.262308 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.262386 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.262409 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.262439 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.262461 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:55Z","lastTransitionTime":"2025-12-05T14:59:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.365371 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.365441 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.365456 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.365806 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.365828 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:55Z","lastTransitionTime":"2025-12-05T14:59:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.468194 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.468232 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.468245 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.468262 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.468273 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:55Z","lastTransitionTime":"2025-12-05T14:59:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.571055 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.571100 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.571112 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.571128 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.571142 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:55Z","lastTransitionTime":"2025-12-05T14:59:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.673994 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.674044 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.674061 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.674081 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.674096 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:55Z","lastTransitionTime":"2025-12-05T14:59:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.776354 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.776404 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.776415 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.776433 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.776445 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:55Z","lastTransitionTime":"2025-12-05T14:59:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.878189 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.878240 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.878253 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.878272 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.878285 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:55Z","lastTransitionTime":"2025-12-05T14:59:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.937207 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.937290 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.937305 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.937325 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.937363 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:55Z","lastTransitionTime":"2025-12-05T14:59:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:55 crc kubenswrapper[4840]: E1205 14:59:55.951438 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:55Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.956067 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.956207 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.956230 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.956251 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.956266 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:55Z","lastTransitionTime":"2025-12-05T14:59:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:55 crc kubenswrapper[4840]: E1205 14:59:55.969727 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:55Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.973340 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.973405 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.973483 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.973515 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.973539 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:55Z","lastTransitionTime":"2025-12-05T14:59:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:55 crc kubenswrapper[4840]: E1205 14:59:55.989725 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:55Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.994623 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.994760 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.994843 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.994972 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:55 crc kubenswrapper[4840]: I1205 14:59:55.995069 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:55Z","lastTransitionTime":"2025-12-05T14:59:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:56 crc kubenswrapper[4840]: E1205 14:59:56.008377 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:56Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.012119 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.012201 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.012216 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.012238 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.012255 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:56Z","lastTransitionTime":"2025-12-05T14:59:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:56 crc kubenswrapper[4840]: E1205 14:59:56.025534 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T14:59:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T14:59:56Z is after 2025-08-24T17:21:41Z" Dec 05 14:59:56 crc kubenswrapper[4840]: E1205 14:59:56.025727 4840 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.027309 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.027343 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.027353 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.027368 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.027378 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:56Z","lastTransitionTime":"2025-12-05T14:59:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.066006 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.066048 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.066078 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:56 crc kubenswrapper[4840]: E1205 14:59:56.066140 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 14:59:56 crc kubenswrapper[4840]: E1205 14:59:56.066230 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 14:59:56 crc kubenswrapper[4840]: E1205 14:59:56.066335 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.129565 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.129612 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.129636 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.129658 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.129675 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:56Z","lastTransitionTime":"2025-12-05T14:59:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.233002 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.233037 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.233046 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.233062 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.233070 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:56Z","lastTransitionTime":"2025-12-05T14:59:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.335279 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.335672 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.335842 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.336075 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.336223 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:56Z","lastTransitionTime":"2025-12-05T14:59:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.439507 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.439785 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.439917 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.440007 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.440117 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:56Z","lastTransitionTime":"2025-12-05T14:59:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.542489 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.542766 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.542911 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.543106 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.543247 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:56Z","lastTransitionTime":"2025-12-05T14:59:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.646464 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.646536 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.646560 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.646607 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.646632 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:56Z","lastTransitionTime":"2025-12-05T14:59:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.749915 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.750013 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.750060 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.750084 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.750100 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:56Z","lastTransitionTime":"2025-12-05T14:59:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.853851 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.854055 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.854095 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.854129 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.854153 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:56Z","lastTransitionTime":"2025-12-05T14:59:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.957121 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.957267 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.957294 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.957324 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:56 crc kubenswrapper[4840]: I1205 14:59:56.957348 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:56Z","lastTransitionTime":"2025-12-05T14:59:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.060300 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.060336 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.060346 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.060362 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.060375 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:57Z","lastTransitionTime":"2025-12-05T14:59:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.065713 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 14:59:57 crc kubenswrapper[4840]: E1205 14:59:57.065904 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.163666 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.164103 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.164113 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.164127 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.164136 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:57Z","lastTransitionTime":"2025-12-05T14:59:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.267188 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.267243 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.267257 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.267277 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.267293 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:57Z","lastTransitionTime":"2025-12-05T14:59:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.370215 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.370261 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.370277 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.370296 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.370307 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:57Z","lastTransitionTime":"2025-12-05T14:59:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.471732 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.471772 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.471796 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.471811 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.471821 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:57Z","lastTransitionTime":"2025-12-05T14:59:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.574666 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.574695 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.574703 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.574716 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.574726 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:57Z","lastTransitionTime":"2025-12-05T14:59:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.677192 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.677387 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.677426 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.677462 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.677486 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:57Z","lastTransitionTime":"2025-12-05T14:59:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.781002 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.781203 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.781225 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.781294 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.781309 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:57Z","lastTransitionTime":"2025-12-05T14:59:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.884976 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.885029 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.885044 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.885064 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.885076 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:57Z","lastTransitionTime":"2025-12-05T14:59:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.987624 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.987683 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.987695 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.987714 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:57 crc kubenswrapper[4840]: I1205 14:59:57.987726 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:57Z","lastTransitionTime":"2025-12-05T14:59:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.066461 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.066545 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.066474 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 14:59:58 crc kubenswrapper[4840]: E1205 14:59:58.066619 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 14:59:58 crc kubenswrapper[4840]: E1205 14:59:58.066678 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 14:59:58 crc kubenswrapper[4840]: E1205 14:59:58.066753 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.090242 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.090287 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.090301 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.090323 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.090339 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:58Z","lastTransitionTime":"2025-12-05T14:59:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.193531 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.194561 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.194592 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.194622 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.194643 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:58Z","lastTransitionTime":"2025-12-05T14:59:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.298072 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.298154 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.298178 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.298211 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.298233 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:58Z","lastTransitionTime":"2025-12-05T14:59:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.401210 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.401267 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.401285 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.401306 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.401321 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:58Z","lastTransitionTime":"2025-12-05T14:59:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.503803 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.503858 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.503904 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.503922 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.503934 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:58Z","lastTransitionTime":"2025-12-05T14:59:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.607341 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.607375 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.607384 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.607400 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.607410 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:58Z","lastTransitionTime":"2025-12-05T14:59:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.709662 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.709701 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.709713 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.709729 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.709741 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:58Z","lastTransitionTime":"2025-12-05T14:59:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.812640 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.812682 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.812692 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.812708 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.812719 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:58Z","lastTransitionTime":"2025-12-05T14:59:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.915416 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.915461 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.915473 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.915493 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:58 crc kubenswrapper[4840]: I1205 14:59:58.915505 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:58Z","lastTransitionTime":"2025-12-05T14:59:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.017968 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.018009 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.018017 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.018030 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.018039 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:59Z","lastTransitionTime":"2025-12-05T14:59:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.066434 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq"
Dec 05 14:59:59 crc kubenswrapper[4840]: E1205 14:59:59.066588 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.121127 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.121175 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.121184 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.121198 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.121207 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:59Z","lastTransitionTime":"2025-12-05T14:59:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.223425 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.223463 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.223485 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.223500 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.223511 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:59Z","lastTransitionTime":"2025-12-05T14:59:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.325772 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.325826 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.325835 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.325848 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.325859 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:59Z","lastTransitionTime":"2025-12-05T14:59:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.428565 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.428626 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.428639 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.428655 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.428664 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:59Z","lastTransitionTime":"2025-12-05T14:59:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.531719 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.531761 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.531771 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.531788 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.531800 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:59Z","lastTransitionTime":"2025-12-05T14:59:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.634594 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.634635 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.634646 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.634662 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.634676 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:59Z","lastTransitionTime":"2025-12-05T14:59:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.737653 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.737706 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.737721 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.737739 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.737751 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:59Z","lastTransitionTime":"2025-12-05T14:59:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.840551 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.840591 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.840602 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.840621 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.840633 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:59Z","lastTransitionTime":"2025-12-05T14:59:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.943542 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.943595 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.943608 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.943626 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 14:59:59 crc kubenswrapper[4840]: I1205 14:59:59.943638 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T14:59:59Z","lastTransitionTime":"2025-12-05T14:59:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.045984 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.046055 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.046077 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.046105 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.046124 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:00Z","lastTransitionTime":"2025-12-05T15:00:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.066399 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 15:00:00 crc kubenswrapper[4840]: E1205 15:00:00.066578 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.066426 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.066598 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 15:00:00 crc kubenswrapper[4840]: E1205 15:00:00.066966 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 15:00:00 crc kubenswrapper[4840]: E1205 15:00:00.066754 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.147959 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.147994 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.148003 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.148015 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.148023 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:00Z","lastTransitionTime":"2025-12-05T15:00:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.251343 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.251378 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.251389 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.251401 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.251409 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:00Z","lastTransitionTime":"2025-12-05T15:00:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.354679 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.354711 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.354720 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.354733 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.354743 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:00Z","lastTransitionTime":"2025-12-05T15:00:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.457782 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.457832 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.457840 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.457857 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.457890 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:00Z","lastTransitionTime":"2025-12-05T15:00:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.560339 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.560374 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.560384 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.560398 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.560409 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:00Z","lastTransitionTime":"2025-12-05T15:00:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.662800 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.662855 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.662899 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.662916 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.662928 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:00Z","lastTransitionTime":"2025-12-05T15:00:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.765636 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.765677 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.765702 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.765717 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.765728 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:00Z","lastTransitionTime":"2025-12-05T15:00:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.868563 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.868604 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.868615 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.868631 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.868643 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:00Z","lastTransitionTime":"2025-12-05T15:00:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.971424 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.971783 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.971798 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.971815 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:00 crc kubenswrapper[4840]: I1205 15:00:00.971829 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:00Z","lastTransitionTime":"2025-12-05T15:00:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.066320 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq"
Dec 05 15:00:01 crc kubenswrapper[4840]: E1205 15:00:01.066464 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.074333 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.074394 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.074410 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.074433 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.074448 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:01Z","lastTransitionTime":"2025-12-05T15:00:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.177043 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.177089 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.177104 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.177122 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.177136 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:01Z","lastTransitionTime":"2025-12-05T15:00:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.279359 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.279401 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.279410 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.279424 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.279433 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:01Z","lastTransitionTime":"2025-12-05T15:00:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.381593 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.381633 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.381641 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.381656 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.381667 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:01Z","lastTransitionTime":"2025-12-05T15:00:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.483931 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.483965 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.483973 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.483987 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.483996 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:01Z","lastTransitionTime":"2025-12-05T15:00:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.587269 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.587311 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.587324 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.587338 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.587349 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:01Z","lastTransitionTime":"2025-12-05T15:00:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.690773 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.690838 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.690860 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.690925 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.690948 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:01Z","lastTransitionTime":"2025-12-05T15:00:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.793830 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.793874 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.793900 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.793920 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.793933 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:01Z","lastTransitionTime":"2025-12-05T15:00:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.897415 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.897464 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.897474 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.897494 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:01 crc kubenswrapper[4840]: I1205 15:00:01.897506 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:01Z","lastTransitionTime":"2025-12-05T15:00:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:01.999956 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.000303 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.000324 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.000344 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.000358 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:02Z","lastTransitionTime":"2025-12-05T15:00:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.066701 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.066756 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.066714 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 15:00:02 crc kubenswrapper[4840]: E1205 15:00:02.066854 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 15:00:02 crc kubenswrapper[4840]: E1205 15:00:02.066970 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 15:00:02 crc kubenswrapper[4840]: E1205 15:00:02.067086 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.079297 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:02Z is after 2025-08-24T17:21:41Z"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.092604 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:02Z is after 2025-08-24T17:21:41Z"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.103220 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.103255 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.103265 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.103304 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.103318 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:02Z","lastTransitionTime":"2025-12-05T15:00:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.109684 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:02Z is after 2025-08-24T17:21:41Z"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.123681 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:02Z is after 2025-08-24T17:21:41Z"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.147419 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:02Z is after 2025-08-24T17:21:41Z"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.166105 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:02Z is after 2025-08-24T17:21:41Z"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.190663 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d45af504d931633ab2ddeb4f17089aac673eb4138e1fc5aea7024564ff5836ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3f4003c17203d33aa7ebf592c1131f76243cad6db0fe14120c2b692bbc6077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tszn5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:02Z is after 2025-08-24T17:21:41Z" Dec 05 
15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.201847 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gn7qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5cf5212-af00-4788-ad5f-ff824fea7c0f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gn7qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:02Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.205507 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.205542 4840 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.205555 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.205573 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.205586 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:02Z","lastTransitionTime":"2025-12-05T15:00:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.214680 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:02Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.227265 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:02Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.238409 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:02Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.253197 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID
\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:02Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.263648 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:02Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.277647 4840 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:02Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.294330 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02ce9423f51dd0bad2b3d0afef0cea370294f79d55fc2ede0bfb11fce61493da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02ce9423f51dd0bad2b3d0afef0cea370294f79d55fc2ede0bfb11fce61493da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:31Z\\\",\\\"message\\\":\\\"-kubernetes/ovnkube-node-czvxk in node crc\\\\nI1205 14:59:31.532223 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-czvxk after 0 failed attempt(s)\\\\nI1205 14:59:31.532234 6498 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-czvxk\\\\nI1205 14:59:31.530225 6498 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5\\\\nI1205 14:59:31.532257 6498 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5 in node crc\\\\nI1205 14:59:31.532266 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5 after 0 failed attempt(s)\\\\nI1205 14:59:31.532275 6498 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5\\\\nF1205 14:59:31.530611 6498 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to star\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-czvxk_openshift-ovn-kubernetes(e9a432c2-725d-46c6-963e-68a99ba35c89)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:02Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.304936 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ddb0c1f54c32a87c863028965a174607b182b1a1ae7e681045ad724e50e0d7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:49Z\\\",\\\"message\\\":\\\"2025-12-05T14:59:03+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3df2e96f-ba4e-4dae-bc72-0304b6d7c01f\\\\n2025-12-05T14:59:03+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3df2e96f-ba4e-4dae-bc72-0304b6d7c01f to 
/host/opt/cni/bin/\\\\n2025-12-05T14:59:04Z [verbose] multus-daemon started\\\\n2025-12-05T14:59:04Z [verbose] Readiness Indicator file check\\\\n2025-12-05T14:59:49Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:02Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.307314 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.307351 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.307363 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.307380 4840 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.307391 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:02Z","lastTransitionTime":"2025-12-05T15:00:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.315026 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3032bf4d-2317-4cc8-9117-9cb879a42db0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c057d4b05d7f3d5366be3d427eb21f50da128fafa496ba450352080fe1e93108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7586294d68d4dc8b5e9fd6ed9c807d9086d3c5b5690583b353e966af65b7ec2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb83899c7f1ab6ad9c37b9a9760c060049c4868ed04fcd1939553e569d05db12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-
dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:02Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.323818 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b5fbbdb-04ab-4562-a173-3e9e90303274\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c876f3a0a8f04321a7811abc93079381e36b22b7757233f72a40ddc96858bed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c7b98370b2f34f4e0e0f6b126da9d2be265612e6ffc8bb86fde3c7a26c4a5f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c7b98370b2f34f4e0e0f6b126da9d2be265612e6ffc8bb86fde3c7a26c4a5f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:02Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.409408 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.409449 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.409459 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.409473 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.409482 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:02Z","lastTransitionTime":"2025-12-05T15:00:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.512095 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.512131 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.512140 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.512154 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.512165 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:02Z","lastTransitionTime":"2025-12-05T15:00:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.614760 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.614797 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.614805 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.614818 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.614826 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:02Z","lastTransitionTime":"2025-12-05T15:00:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.717145 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.717216 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.717229 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.717265 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.717281 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:02Z","lastTransitionTime":"2025-12-05T15:00:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.819918 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.819977 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.819999 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.820031 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.820053 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:02Z","lastTransitionTime":"2025-12-05T15:00:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.922699 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.922804 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.922823 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.922848 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:02 crc kubenswrapper[4840]: I1205 15:00:02.923086 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:02Z","lastTransitionTime":"2025-12-05T15:00:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"}
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.027157 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.027220 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.027244 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.027269 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.027287 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:03Z","lastTransitionTime":"2025-12-05T15:00:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.066682 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq"
Dec 05 15:00:03 crc kubenswrapper[4840]: E1205 15:00:03.067113 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.067309 4840 scope.go:117] "RemoveContainer" containerID="02ce9423f51dd0bad2b3d0afef0cea370294f79d55fc2ede0bfb11fce61493da"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.129515 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.129560 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.129571 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.129588 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.129599 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:03Z","lastTransitionTime":"2025-12-05T15:00:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"}
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.231989 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.232033 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.232043 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.232061 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.232074 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:03Z","lastTransitionTime":"2025-12-05T15:00:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.335585 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.335629 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.335640 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.335658 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.335669 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:03Z","lastTransitionTime":"2025-12-05T15:00:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.438131 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.438170 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.438181 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.438209 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.438221 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:03Z","lastTransitionTime":"2025-12-05T15:00:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"}
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.541098 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.541141 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.541152 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.541184 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.541195 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:03Z","lastTransitionTime":"2025-12-05T15:00:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.576184 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-czvxk_e9a432c2-725d-46c6-963e-68a99ba35c89/ovnkube-controller/2.log"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.580284 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerStarted","Data":"138309b3756a49d9260dc20eccf20c12afdf023af9a1e7ce18c2e3211f84e616"}
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.580829 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk"
Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.596315 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ddb0c1f54c32a87c863028965a174607b182b1a1ae7e681045ad724e50e0d7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:49Z\\\",\\\"message\\\":\\\"2025-12-05T14:59:03+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3df2e96f-ba4e-4dae-bc72-0304b6d7c01f\\\\n2025-12-05T14:59:03+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3df2e96f-ba4e-4dae-bc72-0304b6d7c01f to /host/opt/cni/bin/\\\\n2025-12-05T14:59:04Z [verbose] multus-daemon started\\\\n2025-12-05T14:59:04Z [verbose] Readiness Indicator file check\\\\n2025-12-05T14:59:49Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:03Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.609355 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3032bf4d-2317-4cc8-9117-9cb879a42db0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c057d4b05d7f3d5366be3d427eb21f50da128fafa496ba450352080fe1e93108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7586294d68d4dc8b5e9fd6ed9c807d9086d3c5b5690583b353e966af65b7ec2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb83899c7f1ab6ad9c37b9a9760c060049c4868ed04fcd1939553e569d05db12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:03Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.622035 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:03Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.635797 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:03Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.644478 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.644547 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.644560 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.644575 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.644587 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:03Z","lastTransitionTime":"2025-12-05T15:00:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.653191 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://138309b3756a49d9260dc20eccf20c12afdf023af9a1e7ce18c2e3211f84e616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02ce9423f51dd0bad2b3d0afef0cea370294f79d55fc2ede0bfb11fce61493da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:31Z\\\",\\\"message\\\":\\\"-kubernetes/ovnkube-node-czvxk in node crc\\\\nI1205 14:59:31.532223 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-czvxk after 0 failed attempt(s)\\\\nI1205 14:59:31.532234 6498 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-czvxk\\\\nI1205 14:59:31.530225 6498 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5\\\\nI1205 14:59:31.532257 6498 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5 in node crc\\\\nI1205 14:59:31.532266 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5 after 0 failed attempt(s)\\\\nI1205 14:59:31.532275 6498 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5\\\\nF1205 14:59:31.530611 6498 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to 
star\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T15:00:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"c
ontainerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:03Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.663617 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b5fbbdb-04ab-4562-a173-3e9e90303274\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c876f3a0a8f04321a7811abc93079381e36b22b7757233f72a40ddc96858bed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c7b98370b2f34f4e0e0f6b126da9d2be265612e6ffc8bb86fde3c7a26c4a5f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c7b98370b2f34f4e0e0f6b126da9d2be265612e6ffc8bb86fde3c7a26c4a5f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:03Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.677172 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:03Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.697909 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:03Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.715347 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:03Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.731705 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d45af504d931633ab2ddeb4f17089aac673eb4138e1fc5aea7024564ff5836ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3f4003c17203d33aa7ebf592c1131f76243cad6db0fe14120c2b692bbc6077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tszn5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:03Z is after 2025-08-24T17:21:41Z" Dec 05 
15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.747517 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.747577 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.747588 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.747609 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.747622 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:03Z","lastTransitionTime":"2025-12-05T15:00:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.750627 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:03Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.766624 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:03Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.778942 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:03Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.797192 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gn7qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5cf5212-af00-4788-ad5f-ff824fea7c0f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gn7qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:03Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.812371 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:03Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.828204 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:03Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.840342 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:03Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.849433 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.849480 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.849490 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.849507 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.849517 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:03Z","lastTransitionTime":"2025-12-05T15:00:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.849630 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:03Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.952176 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.952208 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.952216 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.952229 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:03 crc kubenswrapper[4840]: I1205 15:00:03.952237 4840 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:03Z","lastTransitionTime":"2025-12-05T15:00:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.054130 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.054171 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.054180 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.054193 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.054204 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:04Z","lastTransitionTime":"2025-12-05T15:00:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.066480 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.066511 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 15:00:04 crc kubenswrapper[4840]: E1205 15:00:04.066615 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.066651 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:00:04 crc kubenswrapper[4840]: E1205 15:00:04.066779 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 15:00:04 crc kubenswrapper[4840]: E1205 15:00:04.066859 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.156890 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.156930 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.156941 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.156957 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.156968 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:04Z","lastTransitionTime":"2025-12-05T15:00:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.259119 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.259161 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.259176 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.259191 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.259202 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:04Z","lastTransitionTime":"2025-12-05T15:00:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.361553 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.361583 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.361591 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.361604 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.361614 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:04Z","lastTransitionTime":"2025-12-05T15:00:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.464447 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.464507 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.464524 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.464548 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.464564 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:04Z","lastTransitionTime":"2025-12-05T15:00:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.567005 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.567038 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.567046 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.567057 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.567066 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:04Z","lastTransitionTime":"2025-12-05T15:00:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.669470 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.669517 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.669540 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.669590 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.669624 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:04Z","lastTransitionTime":"2025-12-05T15:00:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.771725 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.771817 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.771841 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.771861 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.771910 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:04Z","lastTransitionTime":"2025-12-05T15:00:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.874537 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.874612 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.874623 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.874643 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.874655 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:04Z","lastTransitionTime":"2025-12-05T15:00:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.976895 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.976939 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.976949 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.976964 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:04 crc kubenswrapper[4840]: I1205 15:00:04.976977 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:04Z","lastTransitionTime":"2025-12-05T15:00:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.066334 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 15:00:05 crc kubenswrapper[4840]: E1205 15:00:05.066585 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.079863 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.079943 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.079967 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.079995 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.080019 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:05Z","lastTransitionTime":"2025-12-05T15:00:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.182699 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.182815 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.182830 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.182845 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.182856 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:05Z","lastTransitionTime":"2025-12-05T15:00:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.285739 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.285786 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.285800 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.285817 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.285829 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:05Z","lastTransitionTime":"2025-12-05T15:00:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.389079 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.389141 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.389156 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.389177 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.389191 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:05Z","lastTransitionTime":"2025-12-05T15:00:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.492317 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.492371 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.492385 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.492410 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.492426 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:05Z","lastTransitionTime":"2025-12-05T15:00:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.588194 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-czvxk_e9a432c2-725d-46c6-963e-68a99ba35c89/ovnkube-controller/3.log" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.589136 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-czvxk_e9a432c2-725d-46c6-963e-68a99ba35c89/ovnkube-controller/2.log" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.592253 4840 generic.go:334] "Generic (PLEG): container finished" podID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerID="138309b3756a49d9260dc20eccf20c12afdf023af9a1e7ce18c2e3211f84e616" exitCode=1 Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.592294 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerDied","Data":"138309b3756a49d9260dc20eccf20c12afdf023af9a1e7ce18c2e3211f84e616"} Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.592329 4840 scope.go:117] "RemoveContainer" containerID="02ce9423f51dd0bad2b3d0afef0cea370294f79d55fc2ede0bfb11fce61493da" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.593034 4840 scope.go:117] "RemoveContainer" containerID="138309b3756a49d9260dc20eccf20c12afdf023af9a1e7ce18c2e3211f84e616" Dec 05 15:00:05 crc kubenswrapper[4840]: E1205 15:00:05.593191 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-czvxk_openshift-ovn-kubernetes(e9a432c2-725d-46c6-963e-68a99ba35c89)\"" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.602589 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.602932 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.602949 4840 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.602967 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.602979 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:05Z","lastTransitionTime":"2025-12-05T15:00:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.614772 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:05Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.631821 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T15:00:05Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.650267 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\
\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:05Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.664071 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:05Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.679177 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:05Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.700638 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://138309b3756a49d9260dc20eccf20c12afdf023a
f9a1e7ce18c2e3211f84e616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02ce9423f51dd0bad2b3d0afef0cea370294f79d55fc2ede0bfb11fce61493da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:31Z\\\",\\\"message\\\":\\\"-kubernetes/ovnkube-node-czvxk in node crc\\\\nI1205 14:59:31.532223 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-czvxk after 0 failed attempt(s)\\\\nI1205 14:59:31.532234 6498 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-czvxk\\\\nI1205 14:59:31.530225 6498 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5\\\\nI1205 14:59:31.532257 6498 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5 in node crc\\\\nI1205 14:59:31.532266 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5 after 0 failed attempt(s)\\\\nI1205 14:59:31.532275 6498 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5\\\\nF1205 14:59:31.530611 6498 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to star\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://138309b3756a49d9260dc20eccf20c12afdf023af9a1e7ce18c2e3211f84e616\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T15:00:04Z\\\",\\\"message\\\":\\\"ck-source-55646444c4-trplf\\\\nI1205 15:00:04.385812 6928 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 15:00:04.385818 6928 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI1205 15:00:04.385818 6928 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 15:00:04.385819 6928 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5\\\\nI1205 15:00:04.385839 6928 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1205 15:00:04.385851 6928 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1205 15:00:04.385896 6928 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nF1205 15:00:04.385902 6928 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network 
controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initializa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T15:00:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6
df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:05Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.705093 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.705178 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.705196 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.705217 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.705231 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:05Z","lastTransitionTime":"2025-12-05T15:00:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.720292 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ddb0c1f54c32a87c863028965a174607b182b1a1ae7e681045ad724e50e0d7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:49Z\\\",\\\"message\\\":\\\"2025-12-05T14:59:03+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3df2e96f-ba4e-4dae-bc72-0304b6d7c01f\\\\n2025-12-05T14:59:03+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3df2e96f-ba4e-4dae-bc72-0304b6d7c01f to /host/opt/cni/bin/\\\\n2025-12-05T14:59:04Z [verbose] multus-daemon started\\\\n2025-12-05T14:59:04Z [verbose] Readiness Indicator file check\\\\n2025-12-05T14:59:49Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:05Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.731392 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3032bf4d-2317-4cc8-9117-9cb879a42db0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c057d4b05d7f3d5366be3d427eb21f50da128fafa496ba450352080fe1e93108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7586294d68d4dc8b5e9fd6ed9c807d9086d3c5b5690583b353e966af65b7ec2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb83899c7f1ab6ad9c37b9a9760c060049c4868ed04fcd1939553e569d05db12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:05Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.742132 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:05Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.753483 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b5fbbdb-04ab-4562-a173-3e9e90303274\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c876f3a0a8f04321a7811abc93079381e36b22b7757233f72a40ddc96858bed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c7b98370b2f34f4e0e0f6b126da9d2be265612e6ffc8bb86fde3c7a26c4a5f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f
4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c7b98370b2f34f4e0e0f6b126da9d2be265612e6ffc8bb86fde3c7a26c4a5f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:05Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.768219 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:05Z is 
after 2025-08-24T17:21:41Z" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.780703 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:05Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.792729 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:05Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.806405 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:05Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.811533 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.811562 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:05 crc 
kubenswrapper[4840]: I1205 15:00:05.811571 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.811589 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.811598 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:05Z","lastTransitionTime":"2025-12-05T15:00:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.821278 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:05Z is after 2025-08-24T17:21:41Z" Dec 
05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.832443 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d45af504d931633ab2ddeb4f17089aac673eb4138e1fc5aea7024564ff5836ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3f4003c17203d33aa7ebf592c1131f76243cad6db0fe14120c2b692bbc6077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tszn5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:05Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.843798 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:05Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.854800 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gn7qq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5cf5212-af00-4788-ad5f-ff824fea7c0f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gn7qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:05Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.913436 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.913473 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.913509 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.913523 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:05 crc kubenswrapper[4840]: I1205 15:00:05.913533 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:05Z","lastTransitionTime":"2025-12-05T15:00:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.015734 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.015788 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.015804 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.015823 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.015838 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:06Z","lastTransitionTime":"2025-12-05T15:00:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.066726 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.066778 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:00:06 crc kubenswrapper[4840]: E1205 15:00:06.066864 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.066778 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 15:00:06 crc kubenswrapper[4840]: E1205 15:00:06.066975 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 15:00:06 crc kubenswrapper[4840]: E1205 15:00:06.067039 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.100410 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.100492 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.100521 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:00:06 crc kubenswrapper[4840]: E1205 15:00:06.100631 4840 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 15:00:06 crc kubenswrapper[4840]: E1205 15:00:06.100705 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 15:01:10.100692836 +0000 UTC m=+148.441755450 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 15:00:06 crc kubenswrapper[4840]: E1205 15:00:06.100852 4840 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 15:00:06 crc kubenswrapper[4840]: E1205 15:00:06.100975 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 15:01:10.100961635 +0000 UTC m=+148.442024249 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 15:00:06 crc kubenswrapper[4840]: E1205 15:00:06.101164 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 15:00:06 crc kubenswrapper[4840]: E1205 15:00:06.101192 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 15:00:06 crc kubenswrapper[4840]: E1205 15:00:06.101206 4840 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 15:00:06 crc kubenswrapper[4840]: E1205 15:00:06.101251 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 15:01:10.101241834 +0000 UTC m=+148.442304448 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.118016 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.118056 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.118066 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.118082 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.118093 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:06Z","lastTransitionTime":"2025-12-05T15:00:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.201610 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.201704 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 15:00:06 crc kubenswrapper[4840]: E1205 15:00:06.201853 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 15:00:06 crc kubenswrapper[4840]: E1205 15:00:06.201908 4840 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 15:00:06 crc kubenswrapper[4840]: E1205 15:00:06.201918 4840 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 15:00:06 crc kubenswrapper[4840]: E1205 15:00:06.201957 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:10.201920228 +0000 UTC m=+148.542982872 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:00:06 crc kubenswrapper[4840]: E1205 15:00:06.202010 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 15:01:10.2019962 +0000 UTC m=+148.543058854 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.219969 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.220054 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.220145 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.220182 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.220203 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:06Z","lastTransitionTime":"2025-12-05T15:00:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.326039 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.326098 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.326121 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.326150 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.326171 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:06Z","lastTransitionTime":"2025-12-05T15:00:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.327492 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.327539 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.327556 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.327576 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.327591 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:06Z","lastTransitionTime":"2025-12-05T15:00:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:06 crc kubenswrapper[4840]: E1205 15:00:06.344271 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:06Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.349945 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.350006 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.350031 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.350059 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.350082 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:06Z","lastTransitionTime":"2025-12-05T15:00:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:06 crc kubenswrapper[4840]: E1205 15:00:06.371119 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:06Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.375529 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.375585 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.375603 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.375624 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.375644 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:06Z","lastTransitionTime":"2025-12-05T15:00:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:06 crc kubenswrapper[4840]: E1205 15:00:06.390155 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:06Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.393936 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.393975 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.393986 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.394001 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.394010 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:06Z","lastTransitionTime":"2025-12-05T15:00:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:06 crc kubenswrapper[4840]: E1205 15:00:06.408217 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:06Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.412176 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.412223 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.412234 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.412254 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.412267 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:06Z","lastTransitionTime":"2025-12-05T15:00:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:06 crc kubenswrapper[4840]: E1205 15:00:06.432384 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:06Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:06 crc kubenswrapper[4840]: E1205 15:00:06.432603 4840 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.435110 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.435145 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.435158 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.435174 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.435187 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:06Z","lastTransitionTime":"2025-12-05T15:00:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.538171 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.538233 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.538250 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.538273 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.538290 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:06Z","lastTransitionTime":"2025-12-05T15:00:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.599156 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-czvxk_e9a432c2-725d-46c6-963e-68a99ba35c89/ovnkube-controller/3.log" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.641516 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.641595 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.641621 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.641654 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.641678 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:06Z","lastTransitionTime":"2025-12-05T15:00:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.744435 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.744505 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.744520 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.744545 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.744564 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:06Z","lastTransitionTime":"2025-12-05T15:00:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.847370 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.847468 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.847491 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.847517 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.847538 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:06Z","lastTransitionTime":"2025-12-05T15:00:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.950113 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.950151 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.950163 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.950179 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:06 crc kubenswrapper[4840]: I1205 15:00:06.950191 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:06Z","lastTransitionTime":"2025-12-05T15:00:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.052122 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.052167 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.052176 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.052189 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.052199 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:07Z","lastTransitionTime":"2025-12-05T15:00:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.066754 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 15:00:07 crc kubenswrapper[4840]: E1205 15:00:07.066936 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.155096 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.155137 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.155151 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.155168 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.155182 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:07Z","lastTransitionTime":"2025-12-05T15:00:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.258124 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.258160 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.258171 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.258188 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.258200 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:07Z","lastTransitionTime":"2025-12-05T15:00:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.360976 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.361029 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.361061 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.361080 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.361091 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:07Z","lastTransitionTime":"2025-12-05T15:00:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.463934 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.463975 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.463985 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.463997 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.464007 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:07Z","lastTransitionTime":"2025-12-05T15:00:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.565839 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.565891 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.565902 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.565915 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.565926 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:07Z","lastTransitionTime":"2025-12-05T15:00:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.668163 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.668209 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.668220 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.668236 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.668252 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:07Z","lastTransitionTime":"2025-12-05T15:00:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.771225 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.771278 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.771294 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.771317 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.771333 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:07Z","lastTransitionTime":"2025-12-05T15:00:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.874531 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.874593 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.874607 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.874624 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.874637 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:07Z","lastTransitionTime":"2025-12-05T15:00:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.976738 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.976808 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.976832 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.976860 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:07 crc kubenswrapper[4840]: I1205 15:00:07.976923 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:07Z","lastTransitionTime":"2025-12-05T15:00:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.065887 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.065966 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.066022 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:00:08 crc kubenswrapper[4840]: E1205 15:00:08.066080 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 15:00:08 crc kubenswrapper[4840]: E1205 15:00:08.066277 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 15:00:08 crc kubenswrapper[4840]: E1205 15:00:08.066507 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.079072 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.079118 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.079131 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.079148 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.079161 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:08Z","lastTransitionTime":"2025-12-05T15:00:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.181994 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.182044 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.182060 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.182084 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.182103 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:08Z","lastTransitionTime":"2025-12-05T15:00:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.284791 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.284840 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.284856 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.284900 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.284916 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:08Z","lastTransitionTime":"2025-12-05T15:00:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.388032 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.388081 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.388092 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.388110 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.388122 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:08Z","lastTransitionTime":"2025-12-05T15:00:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.491375 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.491442 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.491464 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.491490 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.491510 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:08Z","lastTransitionTime":"2025-12-05T15:00:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.594543 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.594598 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.594614 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.594633 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.594646 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:08Z","lastTransitionTime":"2025-12-05T15:00:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.697701 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.697783 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.697808 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.697841 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.697870 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:08Z","lastTransitionTime":"2025-12-05T15:00:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.800114 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.800144 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.800153 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.800164 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.800173 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:08Z","lastTransitionTime":"2025-12-05T15:00:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.903010 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.903074 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.903090 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.903105 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:08 crc kubenswrapper[4840]: I1205 15:00:08.903115 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:08Z","lastTransitionTime":"2025-12-05T15:00:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.004941 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.004974 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.004984 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.004997 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.005006 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:09Z","lastTransitionTime":"2025-12-05T15:00:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.066146 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 15:00:09 crc kubenswrapper[4840]: E1205 15:00:09.066275 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.107343 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.107394 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.107404 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.107418 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.107427 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:09Z","lastTransitionTime":"2025-12-05T15:00:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.210375 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.210412 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.210421 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.210435 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.210445 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:09Z","lastTransitionTime":"2025-12-05T15:00:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.313533 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.313609 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.313620 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.313638 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.313649 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:09Z","lastTransitionTime":"2025-12-05T15:00:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.415900 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.415948 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.415963 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.415983 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.415997 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:09Z","lastTransitionTime":"2025-12-05T15:00:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.519300 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.519352 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.519368 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.519390 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.519411 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:09Z","lastTransitionTime":"2025-12-05T15:00:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.622652 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.622717 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.622733 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.622755 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.622770 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:09Z","lastTransitionTime":"2025-12-05T15:00:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.725554 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.725592 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.725602 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.725621 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.725631 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:09Z","lastTransitionTime":"2025-12-05T15:00:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.828431 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.828476 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.828503 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.828529 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.828545 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:09Z","lastTransitionTime":"2025-12-05T15:00:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.931088 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.931162 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.931185 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.931213 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:09 crc kubenswrapper[4840]: I1205 15:00:09.931239 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:09Z","lastTransitionTime":"2025-12-05T15:00:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.033604 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.033653 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.033698 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.033716 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.033728 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:10Z","lastTransitionTime":"2025-12-05T15:00:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.066374 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.066431 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 15:00:10 crc kubenswrapper[4840]: E1205 15:00:10.066590 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.066628 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 15:00:10 crc kubenswrapper[4840]: E1205 15:00:10.066792 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 15:00:10 crc kubenswrapper[4840]: E1205 15:00:10.066966 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.136161 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.136211 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.136223 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.136240 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.136253 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:10Z","lastTransitionTime":"2025-12-05T15:00:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.239542 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.239584 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.239595 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.239609 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.239618 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:10Z","lastTransitionTime":"2025-12-05T15:00:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.346921 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.346966 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.346982 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.346999 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.347011 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:10Z","lastTransitionTime":"2025-12-05T15:00:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.449527 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.449582 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.449596 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.449612 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.449622 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:10Z","lastTransitionTime":"2025-12-05T15:00:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.552774 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.552857 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.552934 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.552966 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.552982 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:10Z","lastTransitionTime":"2025-12-05T15:00:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.656376 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.656492 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.656507 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.656525 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.656539 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:10Z","lastTransitionTime":"2025-12-05T15:00:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.759360 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.759396 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.759406 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.759419 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.759428 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:10Z","lastTransitionTime":"2025-12-05T15:00:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.862281 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.862341 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.862355 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.862372 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.862394 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:10Z","lastTransitionTime":"2025-12-05T15:00:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.964560 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.964596 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.964605 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.964617 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:10 crc kubenswrapper[4840]: I1205 15:00:10.964630 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:10Z","lastTransitionTime":"2025-12-05T15:00:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.065645 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq"
Dec 05 15:00:11 crc kubenswrapper[4840]: E1205 15:00:11.065781 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.067247 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.067282 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.067294 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.067309 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.067323 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:11Z","lastTransitionTime":"2025-12-05T15:00:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.169548 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.169609 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.169626 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.169647 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.169662 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:11Z","lastTransitionTime":"2025-12-05T15:00:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.271999 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.272064 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.272086 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.272107 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.272122 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:11Z","lastTransitionTime":"2025-12-05T15:00:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.375315 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.375394 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.375416 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.375445 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.375466 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:11Z","lastTransitionTime":"2025-12-05T15:00:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.478190 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.478256 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.478272 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.478296 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.478318 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:11Z","lastTransitionTime":"2025-12-05T15:00:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.581281 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.581362 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.581375 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.581407 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.581422 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:11Z","lastTransitionTime":"2025-12-05T15:00:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.684900 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.684949 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.684958 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.684973 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.684985 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:11Z","lastTransitionTime":"2025-12-05T15:00:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.787004 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.787055 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.787067 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.787085 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.787096 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:11Z","lastTransitionTime":"2025-12-05T15:00:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.889842 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.889919 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.889931 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.889947 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.889958 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:11Z","lastTransitionTime":"2025-12-05T15:00:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.993163 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.993240 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.993281 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.993313 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 15:00:11 crc kubenswrapper[4840]: I1205 15:00:11.993335 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:11Z","lastTransitionTime":"2025-12-05T15:00:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.066459 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.066604 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 15:00:12 crc kubenswrapper[4840]: E1205 15:00:12.066668 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.066481 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 15:00:12 crc kubenswrapper[4840]: E1205 15:00:12.066738 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 15:00:12 crc kubenswrapper[4840]: E1205 15:00:12.066853 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.081328 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b5fbbdb-04ab-4562-a173-3e9e90303274\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c876f3a0a8f04321a7811abc93079381e36b22b7757233f72a40ddc96858bed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c7b98370b2f34f4e0e0f6b126da9d2be265612e6ffc8bb86fde3c7a26c4a5f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c7b98370b2f34f4e0e0f6b126da9d2be265612e6ffc8bb86fde3c7a26c4a5f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:12Z is after 
2025-08-24T17:21:41Z" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.096405 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:12Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.096469 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.096509 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.096525 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.096547 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.096562 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:12Z","lastTransitionTime":"2025-12-05T15:00:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.108236 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:12Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.119438 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:12Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.131480 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.147569 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:12Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.165203 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-05T15:00:12Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.177601 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d45af504d931633ab2ddeb4f17089aac673eb4138e1fc5aea7024564ff5836ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3f4003c17203d33aa7ebf592c1131f76243cad6db0fe14120c2b692bbc6077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tszn5\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:12Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.187380 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gn7qq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5cf5212-af00-4788-ad5f-ff824fea7c0f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gn7qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:12Z is after 2025-08-24T17:21:41Z" 
Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.197783 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-c
erts\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:12Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.199002 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.199028 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.199036 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.199050 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.199061 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:12Z","lastTransitionTime":"2025-12-05T15:00:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.213185 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:12Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.221905 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:12Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.232115 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:12Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.242854 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:12Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.255238 4840 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:12Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.273774 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://138309b3756a49d9260dc20eccf20c12afdf023af9a1e7ce18c2e3211f84e616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02ce9423f51dd0bad2b3d0afef0cea370294f79d55fc2ede0bfb11fce61493da\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:31Z\\\",\\\"message\\\":\\\"-kubernetes/ovnkube-node-czvxk in node crc\\\\nI1205 14:59:31.532223 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-czvxk after 0 failed attempt(s)\\\\nI1205 14:59:31.532234 6498 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-czvxk\\\\nI1205 14:59:31.530225 6498 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5\\\\nI1205 14:59:31.532257 6498 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5 in node crc\\\\nI1205 14:59:31.532266 6498 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5 after 0 failed attempt(s)\\\\nI1205 14:59:31.532275 6498 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5\\\\nF1205 14:59:31.530611 6498 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to star\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://138309b3756a49d9260dc20eccf20c12afdf023af9a1e7ce18c2e3211f84e616\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T15:00:04Z\\\",\\\"message\\\":\\\"ck-source-55646444c4-trplf\\\\nI1205 15:00:04.385812 6928 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 
15:00:04.385818 6928 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI1205 15:00:04.385818 6928 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 15:00:04.385819 6928 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5\\\\nI1205 15:00:04.385839 6928 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1205 15:00:04.385851 6928 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1205 15:00:04.385896 6928 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nF1205 15:00:04.385902 6928 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initializa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T15:00:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:12Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.286738 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ddb0c1f54c32a87c863028965a174607b182b1a1ae7e681045ad724e50e0d7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:49Z\\\",\\\"message\\\":\\\"2025-12-05T14:59:03+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3df2e96f-ba4e-4dae-bc72-0304b6d7c01f\\\\n2025-12-05T14:59:03+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3df2e96f-ba4e-4dae-bc72-0304b6d7c01f to /host/opt/cni/bin/\\\\n2025-12-05T14:59:04Z [verbose] multus-daemon started\\\\n2025-12-05T14:59:04Z [verbose] Readiness Indicator file check\\\\n2025-12-05T14:59:49Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:12Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.298783 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3032bf4d-2317-4cc8-9117-9cb879a42db0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c057d4b05d7f3d5366be3d427eb21f50da128fafa496ba450352080fe1e93108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7586294d68d4dc8b5e9fd6ed9c807d9086d3c5b5690583b353e966af65b7ec2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb83899c7f1ab6ad9c37b9a9760c060049c4868ed04fcd1939553e569d05db12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:12Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.302278 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.302307 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.302316 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.302330 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:12 crc kubenswrapper[4840]: I1205 15:00:12.302339 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:12Z","lastTransitionTime":"2025-12-05T15:00:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
[... 7 further node-status groups identical to the 15:00:12.302 group above (NodeHasSufficientMemory / NodeHasNoDiskPressure / NodeHasSufficientPID / NodeNotReady events plus the "Node became not ready" KubeletNotReady condition), repeated at ~100 ms intervals from 15:00:12.405 through 15:00:13.022, omitted as duplicates ...]
Dec 05 15:00:13 crc kubenswrapper[4840]: I1205 15:00:13.065844 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq"
Dec 05 15:00:13 crc kubenswrapper[4840]: E1205 15:00:13.066109 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f"
[... 10 further identical node-status groups, 15:00:13.125 through 15:00:14.049, omitted as duplicates ...]
Dec 05 15:00:14 crc kubenswrapper[4840]: I1205 15:00:14.066453 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 15:00:14 crc kubenswrapper[4840]: I1205 15:00:14.067079 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 15:00:14 crc kubenswrapper[4840]: I1205 15:00:14.067030 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 15:00:14 crc kubenswrapper[4840]: E1205 15:00:14.067249 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 15:00:14 crc kubenswrapper[4840]: E1205 15:00:14.067386 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 15:00:14 crc kubenswrapper[4840]: E1205 15:00:14.067470 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[... 9 further identical node-status groups, 15:00:14.151 through 15:00:14.976, omitted as duplicates ...]
Dec 05 15:00:15 crc kubenswrapper[4840]: I1205 15:00:15.066307 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq"
Dec 05 15:00:15 crc kubenswrapper[4840]: E1205 15:00:15.066464 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f"
[... 10 further identical node-status groups, 15:00:15.079 through 15:00:16.008, omitted as duplicates ...]
Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.066479 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.066519 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 15:00:16 crc kubenswrapper[4840]: E1205 15:00:16.066858 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 15:00:16 crc kubenswrapper[4840]: E1205 15:00:16.067106 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.067305 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 15:00:16 crc kubenswrapper[4840]: E1205 15:00:16.067446 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[... 4 further identical node-status groups, 15:00:16.111 through 15:00:16.420, omitted as duplicates; the final record is truncated in the source ...]
Has your network provider started?"} Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.524055 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.524103 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.524136 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.524154 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.524167 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:16Z","lastTransitionTime":"2025-12-05T15:00:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.626678 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.626741 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.626775 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.626798 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.626816 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:16Z","lastTransitionTime":"2025-12-05T15:00:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.640040 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.640079 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.640090 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.640105 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.640116 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:16Z","lastTransitionTime":"2025-12-05T15:00:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 05 15:00:16 crc kubenswrapper[4840]: E1205 15:00:16.658179 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:16Z is after 2025-08-24T17:21:41Z"
Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.662227 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.662271 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
event="NodeHasNoDiskPressure" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.662285 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.662302 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.662313 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:16Z","lastTransitionTime":"2025-12-05T15:00:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:16 crc kubenswrapper[4840]: E1205 15:00:16.679121 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:16Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.683479 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.683533 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.683546 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.683574 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.683587 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:16Z","lastTransitionTime":"2025-12-05T15:00:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:16 crc kubenswrapper[4840]: E1205 15:00:16.700443 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:16Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.706026 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.706057 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.706065 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.706078 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.706087 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:16Z","lastTransitionTime":"2025-12-05T15:00:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:16 crc kubenswrapper[4840]: E1205 15:00:16.718096 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:16Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.722248 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.722282 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.722292 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.722305 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.722315 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:16Z","lastTransitionTime":"2025-12-05T15:00:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:16 crc kubenswrapper[4840]: E1205 15:00:16.736696 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T15:00:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a7829ded-3e6f-45e4-9424-5c4c1fcf0ebc\\\",\\\"systemUUID\\\":\\\"7c52e96b-a430-4f06-ad5e-bc57a22eeb52\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:16Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:16 crc kubenswrapper[4840]: E1205 15:00:16.736907 4840 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.738516 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.738571 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.738597 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.738624 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.738643 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:16Z","lastTransitionTime":"2025-12-05T15:00:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.841798 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.841847 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.841857 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.841902 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.841913 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:16Z","lastTransitionTime":"2025-12-05T15:00:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.944531 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.944579 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.944588 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.944606 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:16 crc kubenswrapper[4840]: I1205 15:00:16.944617 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:16Z","lastTransitionTime":"2025-12-05T15:00:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.047413 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.047473 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.047487 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.047510 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.047523 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:17Z","lastTransitionTime":"2025-12-05T15:00:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.065734 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 15:00:17 crc kubenswrapper[4840]: E1205 15:00:17.065998 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.150858 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.150940 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.150959 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.150985 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.151003 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:17Z","lastTransitionTime":"2025-12-05T15:00:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.253890 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.253963 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.253978 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.253997 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.254008 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:17Z","lastTransitionTime":"2025-12-05T15:00:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.357235 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.357288 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.357328 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.357346 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.357358 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:17Z","lastTransitionTime":"2025-12-05T15:00:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.459828 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.459915 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.459933 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.459950 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.459963 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:17Z","lastTransitionTime":"2025-12-05T15:00:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.562068 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.562106 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.562117 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.562134 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.562144 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:17Z","lastTransitionTime":"2025-12-05T15:00:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.665727 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.665790 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.665805 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.665826 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.665846 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:17Z","lastTransitionTime":"2025-12-05T15:00:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.768684 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.768731 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.768775 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.768788 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.768798 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:17Z","lastTransitionTime":"2025-12-05T15:00:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.871117 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.871161 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.871179 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.871201 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.871216 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:17Z","lastTransitionTime":"2025-12-05T15:00:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.973731 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.973794 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.973815 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.973849 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:17 crc kubenswrapper[4840]: I1205 15:00:17.973908 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:17Z","lastTransitionTime":"2025-12-05T15:00:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.066733 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.066774 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.066789 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 15:00:18 crc kubenswrapper[4840]: E1205 15:00:18.066985 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 15:00:18 crc kubenswrapper[4840]: E1205 15:00:18.067077 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 15:00:18 crc kubenswrapper[4840]: E1205 15:00:18.067189 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.075787 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.075816 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.075825 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.075836 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.075846 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:18Z","lastTransitionTime":"2025-12-05T15:00:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.177844 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.177913 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.177926 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.177942 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.177954 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:18Z","lastTransitionTime":"2025-12-05T15:00:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.280279 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.280374 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.280392 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.280415 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.280431 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:18Z","lastTransitionTime":"2025-12-05T15:00:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.383200 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.383291 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.383310 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.383338 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.383361 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:18Z","lastTransitionTime":"2025-12-05T15:00:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.488897 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.488975 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.488989 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.489017 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.489028 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:18Z","lastTransitionTime":"2025-12-05T15:00:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.591697 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.591774 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.591798 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.591828 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.591852 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:18Z","lastTransitionTime":"2025-12-05T15:00:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.694539 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.694667 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.694685 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.694710 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.694728 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:18Z","lastTransitionTime":"2025-12-05T15:00:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.798174 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.798232 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.798242 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.798262 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.798274 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:18Z","lastTransitionTime":"2025-12-05T15:00:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.901214 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.901317 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.901823 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.901948 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:18 crc kubenswrapper[4840]: I1205 15:00:18.902280 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:18Z","lastTransitionTime":"2025-12-05T15:00:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.004777 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.004820 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.004836 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.004859 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.004897 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:19Z","lastTransitionTime":"2025-12-05T15:00:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.066029 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 15:00:19 crc kubenswrapper[4840]: E1205 15:00:19.066300 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.107158 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.107205 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.107248 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.107267 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.107280 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:19Z","lastTransitionTime":"2025-12-05T15:00:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.209708 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.209743 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.209752 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.209786 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.209796 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:19Z","lastTransitionTime":"2025-12-05T15:00:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.312852 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.312924 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.312937 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.312953 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.312963 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:19Z","lastTransitionTime":"2025-12-05T15:00:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.416467 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.416517 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.416528 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.416550 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.416563 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:19Z","lastTransitionTime":"2025-12-05T15:00:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.518966 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.519002 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.519013 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.519028 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.519039 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:19Z","lastTransitionTime":"2025-12-05T15:00:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.548827 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs\") pod \"network-metrics-daemon-gn7qq\" (UID: \"f5cf5212-af00-4788-ad5f-ff824fea7c0f\") " pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 15:00:19 crc kubenswrapper[4840]: E1205 15:00:19.549134 4840 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 15:00:19 crc kubenswrapper[4840]: E1205 15:00:19.549258 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs podName:f5cf5212-af00-4788-ad5f-ff824fea7c0f nodeName:}" failed. No retries permitted until 2025-12-05 15:01:23.549231741 +0000 UTC m=+161.890294385 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs") pod "network-metrics-daemon-gn7qq" (UID: "f5cf5212-af00-4788-ad5f-ff824fea7c0f") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.622144 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.622203 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.622222 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.622239 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.622251 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:19Z","lastTransitionTime":"2025-12-05T15:00:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.724224 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.724256 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.724267 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.724282 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.724291 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:19Z","lastTransitionTime":"2025-12-05T15:00:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.827148 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.827187 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.827195 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.827209 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.827219 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:19Z","lastTransitionTime":"2025-12-05T15:00:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.929139 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.929206 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.929220 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.929237 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:19 crc kubenswrapper[4840]: I1205 15:00:19.929248 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:19Z","lastTransitionTime":"2025-12-05T15:00:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.031805 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.031904 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.031929 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.031957 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.031976 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:20Z","lastTransitionTime":"2025-12-05T15:00:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.066642 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.067202 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:00:20 crc kubenswrapper[4840]: E1205 15:00:20.067336 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.067409 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 15:00:20 crc kubenswrapper[4840]: E1205 15:00:20.067558 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.067757 4840 scope.go:117] "RemoveContainer" containerID="138309b3756a49d9260dc20eccf20c12afdf023af9a1e7ce18c2e3211f84e616" Dec 05 15:00:20 crc kubenswrapper[4840]: E1205 15:00:20.068050 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 15:00:20 crc kubenswrapper[4840]: E1205 15:00:20.068079 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-czvxk_openshift-ovn-kubernetes(e9a432c2-725d-46c6-963e-68a99ba35c89)\"" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.088224 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:20Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.121672 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics
-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://138309b3756a49d9260dc20eccf20c12afdf023af9a1e7ce18c2e3211f84e616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://138309b3756a49d9260dc20eccf20c12afdf023af9a1e7ce18c2e3211f84e616\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T15:00:04Z\\\",\\\"message\\\":\\\"ck-source-55646444c4-trplf\\\\nI1205 15:00:04.385812 6928 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 15:00:04.385818 6928 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI1205 15:00:04.385818 6928 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 15:00:04.385819 6928 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5\\\\nI1205 15:00:04.385839 6928 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1205 15:00:04.385851 6928 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1205 15:00:04.385896 6928 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nF1205 15:00:04.385902 6928 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller 
initializa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T15:00:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-czvxk_openshift-ovn-kubernetes(e9a432c2-725d-46c6-963e-68a99ba35c89)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:20Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.134848 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.134901 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.134910 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.134952 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.134974 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:20Z","lastTransitionTime":"2025-12-05T15:00:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.137175 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ddb0c1f54c32a87c863028965a174607b182b1a1ae7e681045ad724e50e0d7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:49Z\\\",\\\"message\\\":\\\"2025-12-05T14:59:03+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3df2e96f-ba4e-4dae-bc72-0304b6d7c01f\\\\n2025-12-05T14:59:03+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3df2e96f-ba4e-4dae-bc72-0304b6d7c01f to /host/opt/cni/bin/\\\\n2025-12-05T14:59:04Z [verbose] multus-daemon started\\\\n2025-12-05T14:59:04Z [verbose] Readiness Indicator file check\\\\n2025-12-05T14:59:49Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:20Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.147633 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3032bf4d-2317-4cc8-9117-9cb879a42db0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c057d4b05d7f3d5366be3d427eb21f50da128fafa496ba450352080fe1e93108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7586294d68d4dc8b5e9fd6ed9c807d9086d3c5b5690583b353e966af65b7ec2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb83899c7f1ab6ad9c37b9a9760c060049c4868ed04fcd1939553e569d05db12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:20Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.158663 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:20Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.169586 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b5fbbdb-04ab-4562-a173-3e9e90303274\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c876f3a0a8f04321a7811abc93079381e36b22b7757233f72a40ddc96858bed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c7b98370b2f34f4e0e0f6b126da9d2be265612e6ffc8bb86fde3c7a26c4a5f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f
4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c7b98370b2f34f4e0e0f6b126da9d2be265612e6ffc8bb86fde3c7a26c4a5f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:20Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.189145 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4176cc46226c939596f0d7a176f7b0dc5b612d3d5619d7234b89595d8a3de2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:20Z is 
after 2025-08-24T17:21:41Z" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.200748 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c51b095457ad3e6f0853016d20a3f56341842097eced0de900b2593dc11c51b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:20Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.215483 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:20Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.234182 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"78a40d7a-9ba7-4a35-8263-6faf0ca9d52e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74bed57f49ed70a863486ca76f84e630faf8326af37407114b0393c4dd25c927\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4178e644d2bb64be074d562295956351ed4555d11df0d4142b069b1559ee2e7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://65c6bfbbc1cf4c56a2390949b70e953ec7b00656371b358a3ea27efd7705ce5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c42a15502c2334c7cb0446980d6f5ed3ea5a0fca855c5436d0dde8234626dfc0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4549965a6675145c366291f74f4ffe9d9b72a6105c30b353ca4d4972d166e70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1dc743942ab119248a368a664f566064d40d0c7b407045cf86c6d57bbe18043\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c43746577e64a77cc95c3ddfb7593067e2a6d09bc79acb78d5c595978ca0174\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dcpsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nt6vw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:20Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.237213 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.237249 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:20 crc 
kubenswrapper[4840]: I1205 15:00:20.237263 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.237280 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.237291 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:20Z","lastTransitionTime":"2025-12-05T15:00:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.243716 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hnqdw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eefb1b71-2f32-4847-b8eb-27dcce2a320d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8cc5836bf7303c7a27c1db94a35ad69376fb90e227ce98349cec0040e8e6e26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdnc4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hnqdw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:20Z is after 2025-08-24T17:21:41Z" Dec 
05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.253827 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a03adad7-7e03-4bc8-9a48-98dff0e91cc9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d45af504d931633ab2ddeb4f17089aac673eb4138e1fc5aea7024564ff5836ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3f4003c17203d33aa7ebf592c1131f76243cad6db0fe14120c2b692bbc6077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ddcrb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:14Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tszn5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:20Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.268303 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:20Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.280435 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gn7qq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5cf5212-af00-4788-ad5f-ff824fea7c0f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:15Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ctzwh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:15Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gn7qq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:20Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.293080 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:20Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.304333 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cbq2s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca450a54-be29-4a30-9f3c-672b824176e6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df29d23ae1ea4b05717e2a2db5c1e0b8e77ef908e896fd552e7645a6539821d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cr9mt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cbq2s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:20Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.321813 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"810c38e3-e0d7-4b5c-9c12-5847a5b81a3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T14:59:01Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 14:58:55.932494 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 14:58:55.936425 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1983193948/tls.crt::/tmp/serving-cert-1983193948/tls.key\\\\\\\"\\\\nI1205 14:59:01.224844 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 14:59:01.227693 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 14:59:01.227714 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 14:59:01.227743 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 14:59:01.227749 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 14:59:01.236282 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 14:59:01.236316 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236323 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 14:59:01.236328 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 14:59:01.236331 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 14:59:01.236334 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 14:59:01.236337 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 14:59:01.236391 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 14:59:01.238652 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:20Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.334714 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3db868ca-580e-4889-9a2c-2081eeb79c17\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78f2b2639f06790ea202ebae5c62727c69c79cea089a6cc0c7283f7841212fba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec0347453e48f1d4bdf1da4ffb3b394d7d697e6d9b43fee98896583a3569ea88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d88e9601231d4721c1479177f52c13eb3f2955a99a1ed97ef39b5a02562250e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:20Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.339360 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.339389 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.339397 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.339409 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.339418 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:20Z","lastTransitionTime":"2025-12-05T15:00:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.441674 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.441716 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.441730 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.441747 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.441761 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:20Z","lastTransitionTime":"2025-12-05T15:00:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.544079 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.544151 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.544181 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.544209 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.544233 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:20Z","lastTransitionTime":"2025-12-05T15:00:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.646832 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.646912 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.646926 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.646943 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.646955 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:20Z","lastTransitionTime":"2025-12-05T15:00:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.749835 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.749924 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.749945 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.749967 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.749985 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:20Z","lastTransitionTime":"2025-12-05T15:00:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.852553 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.852613 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.852630 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.852652 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.852670 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:20Z","lastTransitionTime":"2025-12-05T15:00:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.954738 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.954784 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.954803 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.954825 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:20 crc kubenswrapper[4840]: I1205 15:00:20.954842 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:20Z","lastTransitionTime":"2025-12-05T15:00:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.058969 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.059046 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.059065 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.059093 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.059114 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:21Z","lastTransitionTime":"2025-12-05T15:00:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.066021 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 15:00:21 crc kubenswrapper[4840]: E1205 15:00:21.066153 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.161658 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.161712 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.161722 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.161737 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.161765 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:21Z","lastTransitionTime":"2025-12-05T15:00:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.266265 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.266332 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.266351 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.266377 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.266397 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:21Z","lastTransitionTime":"2025-12-05T15:00:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.368811 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.368895 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.368913 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.368939 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.368956 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:21Z","lastTransitionTime":"2025-12-05T15:00:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.471771 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.471827 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.471850 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.471913 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.471936 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:21Z","lastTransitionTime":"2025-12-05T15:00:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.575255 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.575332 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.575357 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.575387 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.575408 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:21Z","lastTransitionTime":"2025-12-05T15:00:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.677995 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.678033 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.678043 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.678058 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.678067 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:21Z","lastTransitionTime":"2025-12-05T15:00:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.780369 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.780425 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.780442 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.780464 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.780480 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:21Z","lastTransitionTime":"2025-12-05T15:00:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.882895 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.882965 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.882977 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.882995 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.883031 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:21Z","lastTransitionTime":"2025-12-05T15:00:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.985424 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.985485 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.985501 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.985524 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:21 crc kubenswrapper[4840]: I1205 15:00:21.985544 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:21Z","lastTransitionTime":"2025-12-05T15:00:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.066642 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.066663 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.066910 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 15:00:22 crc kubenswrapper[4840]: E1205 15:00:22.067054 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 15:00:22 crc kubenswrapper[4840]: E1205 15:00:22.067172 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 15:00:22 crc kubenswrapper[4840]: E1205 15:00:22.067274 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.088729 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.088770 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.088782 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.088801 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.088816 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:22Z","lastTransitionTime":"2025-12-05T15:00:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.089328 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9a432c2-725d-46c6-963e-68a99ba35c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://138309b3756a49d9260dc20eccf20c12afdf023af9a1e7ce18c2e3211f84e616\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://138309b3756a49d9260dc20eccf20c12afdf023af9a1e7ce18c2e3211f84e616\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T15:00:04Z\\\",\\\"message\\\":\\\"ck-source-55646444c4-trplf\\\\nI1205 15:00:04.385812 6928 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 15:00:04.385818 6928 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nI1205 15:00:04.385818 6928 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 15:00:04.385819 6928 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5\\\\nI1205 15:00:04.385839 6928 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1205 15:00:04.385851 6928 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI1205 15:00:04.385896 6928 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nF1205 15:00:04.385902 6928 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initializa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T15:00:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-czvxk_openshift-ovn-kubernetes(e9a432c2-725d-46c6-963e-68a99ba35c89)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jbq6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-czvxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:22Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.110387 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-q8pn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ffd91a64-4156-418d-8348-1efa3563e904\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ddb0c1f54c32a87c863028965a174607b182b1a1ae7e681045ad724e50e0d7d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T14:59:49Z\\\",\\\"message\\\":\\\"2025-12-05T14:59:03+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3df2e96f-ba4e-4dae-bc72-0304b6d7c01f\\\\n2025-12-05T14:59:03+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3df2e96f-ba4e-4dae-bc72-0304b6d7c01f to 
/host/opt/cni/bin/\\\\n2025-12-05T14:59:04Z [verbose] multus-daemon started\\\\n2025-12-05T14:59:04Z [verbose] Readiness Indicator file check\\\\n2025-12-05T14:59:49Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9z85z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-multus\"/\"multus-q8pn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:22Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.125037 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3032bf4d-2317-4cc8-9117-9cb879a42db0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c057d4b05d7f3d5366be3d427eb21f50da128fafa496ba450352080fe1e93108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7586294d68d4dc8b5e9fd6ed9c807d9086d3c5b5690583b353e966af65b7ec2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb83899c7f1ab6ad9c37b9a9760c060049c4868ed04fcd1939553e569d05db12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2afb7470dc373687b70f782f62f75c46f03b1a2c104ee1b3da6f80f3ebcd4e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:22Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.139995 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a972c8d4-fbab-487f-a2b7-782c3195d1ef\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb84721a9ae5ea6d969adb6a1bbe1e48e77f6332389aef30c7aa683a8787e2ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ck74c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:59:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-xxvfs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:22Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.158027 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T14:59:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03a3b0fdfc4d8cfa8ba75ca89fa6c20d8676d91e0c8b45c2e9469c9e479c618a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb358632d38546653be8353f769a69707c85057fb08cfaf68e148f2c65e0b00b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:59:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:22Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.177556 4840 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b5fbbdb-04ab-4562-a173-3e9e90303274\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T14:58:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c876f3a0a8f04321a7811abc93079381e36b22b7757233f72a40ddc96858bed7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T14:58:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c7b98370b2f34f4e0e0f6b126da9d2be265612e6ffc8bb86fde3c7a26c4a5f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\
\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c7b98370b2f34f4e0e0f6b126da9d2be265612e6ffc8bb86fde3c7a26c4a5f3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T14:58:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T14:58:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T14:58:42Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T15:00:22Z is after 2025-08-24T17:21:41Z" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.190562 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.190594 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.190604 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.190619 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.190630 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:22Z","lastTransitionTime":"2025-12-05T15:00:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.244928 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-hnqdw" podStartSLOduration=81.244913623 podStartE2EDuration="1m21.244913623s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:00:22.244617744 +0000 UTC m=+100.585680348" watchObservedRunningTime="2025-12-05 15:00:22.244913623 +0000 UTC m=+100.585976237" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.245294 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-nt6vw" podStartSLOduration=81.245287255 podStartE2EDuration="1m21.245287255s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:00:22.23186032 +0000 UTC m=+100.572922954" watchObservedRunningTime="2025-12-05 15:00:22.245287255 +0000 UTC m=+100.586349869" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.258275 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tszn5" podStartSLOduration=81.258255655 podStartE2EDuration="1m21.258255655s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:00:22.257647656 +0000 UTC m=+100.598710290" watchObservedRunningTime="2025-12-05 15:00:22.258255655 +0000 UTC m=+100.599318269" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.294346 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.294383 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.294391 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.294403 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.294412 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:22Z","lastTransitionTime":"2025-12-05T15:00:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.309477 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-cbq2s" podStartSLOduration=81.309455615 podStartE2EDuration="1m21.309455615s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:00:22.309356361 +0000 UTC m=+100.650418985" watchObservedRunningTime="2025-12-05 15:00:22.309455615 +0000 UTC m=+100.650518229" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.338453 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=80.338434231 podStartE2EDuration="1m20.338434231s" podCreationTimestamp="2025-12-05 14:59:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:00:22.325980927 +0000 UTC m=+100.667043541" watchObservedRunningTime="2025-12-05 15:00:22.338434231 +0000 UTC m=+100.679496845" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.353240 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=80.353222959 podStartE2EDuration="1m20.353222959s" podCreationTimestamp="2025-12-05 14:59:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:00:22.339639109 +0000 UTC m=+100.680701723" watchObservedRunningTime="2025-12-05 15:00:22.353222959 +0000 UTC m=+100.694285573" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.396812 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.396837 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.396844 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.396856 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.396883 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:22Z","lastTransitionTime":"2025-12-05T15:00:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.498677 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.498726 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.498746 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.498762 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.498772 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:22Z","lastTransitionTime":"2025-12-05T15:00:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.602436 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.602471 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.602479 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.602492 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.602501 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:22Z","lastTransitionTime":"2025-12-05T15:00:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.705357 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.705424 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.705441 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.705462 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.705475 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:22Z","lastTransitionTime":"2025-12-05T15:00:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.807960 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.808003 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.808017 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.808039 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.808055 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:22Z","lastTransitionTime":"2025-12-05T15:00:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.911222 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.911267 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.911279 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.911299 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:22 crc kubenswrapper[4840]: I1205 15:00:22.911322 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:22Z","lastTransitionTime":"2025-12-05T15:00:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.014149 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.014199 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.014211 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.014233 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.014247 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:23Z","lastTransitionTime":"2025-12-05T15:00:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.066482 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 15:00:23 crc kubenswrapper[4840]: E1205 15:00:23.066715 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.117706 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.117753 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.117764 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.117788 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.117813 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:23Z","lastTransitionTime":"2025-12-05T15:00:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.220588 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.220630 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.220639 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.220653 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.220663 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:23Z","lastTransitionTime":"2025-12-05T15:00:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.325174 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.325275 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.325293 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.325323 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.325352 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:23Z","lastTransitionTime":"2025-12-05T15:00:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.427939 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.427990 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.428007 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.428023 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.428035 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:23Z","lastTransitionTime":"2025-12-05T15:00:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.530149 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.530197 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.530207 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.530220 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.530229 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:23Z","lastTransitionTime":"2025-12-05T15:00:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.634054 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.634092 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.634103 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.634118 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.634130 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:23Z","lastTransitionTime":"2025-12-05T15:00:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.736802 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.736854 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.736884 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.736902 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.736918 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:23Z","lastTransitionTime":"2025-12-05T15:00:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.839579 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.839701 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.839722 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.839742 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.839756 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:23Z","lastTransitionTime":"2025-12-05T15:00:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.943078 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.943234 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.943268 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.943297 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:23 crc kubenswrapper[4840]: I1205 15:00:23.943319 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:23Z","lastTransitionTime":"2025-12-05T15:00:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.047236 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.047329 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.047355 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.047386 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.047406 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:24Z","lastTransitionTime":"2025-12-05T15:00:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.066739 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:00:24 crc kubenswrapper[4840]: E1205 15:00:24.067617 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.067162 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.067102 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 15:00:24 crc kubenswrapper[4840]: E1205 15:00:24.067756 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 15:00:24 crc kubenswrapper[4840]: E1205 15:00:24.067993 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.150276 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.150347 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.150365 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.150387 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.150402 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:24Z","lastTransitionTime":"2025-12-05T15:00:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.253015 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.253060 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.253071 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.253090 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.253102 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:24Z","lastTransitionTime":"2025-12-05T15:00:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.355660 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.355721 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.355740 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.355761 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.355776 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:24Z","lastTransitionTime":"2025-12-05T15:00:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.458412 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.458472 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.458493 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.458519 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.458540 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:24Z","lastTransitionTime":"2025-12-05T15:00:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.561484 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.561553 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.561574 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.561598 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.561615 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:24Z","lastTransitionTime":"2025-12-05T15:00:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.664282 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.664311 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.664320 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.664332 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.664341 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:24Z","lastTransitionTime":"2025-12-05T15:00:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.766781 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.767018 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.767176 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.767279 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.767306 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:24Z","lastTransitionTime":"2025-12-05T15:00:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.870375 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.870407 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.870419 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.870435 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.870509 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:24Z","lastTransitionTime":"2025-12-05T15:00:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.973028 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.973082 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.973098 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.973127 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:24 crc kubenswrapper[4840]: I1205 15:00:24.973148 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:24Z","lastTransitionTime":"2025-12-05T15:00:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.066124 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 15:00:25 crc kubenswrapper[4840]: E1205 15:00:25.066354 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.076328 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.076353 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.076360 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.076371 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.076380 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:25Z","lastTransitionTime":"2025-12-05T15:00:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.179101 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.179131 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.179140 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.179153 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.179164 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:25Z","lastTransitionTime":"2025-12-05T15:00:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.282368 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.282462 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.282497 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.282526 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.282547 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:25Z","lastTransitionTime":"2025-12-05T15:00:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.384896 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.384967 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.384989 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.385020 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.385049 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:25Z","lastTransitionTime":"2025-12-05T15:00:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.487779 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.487835 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.487852 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.487906 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.487930 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:25Z","lastTransitionTime":"2025-12-05T15:00:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.590113 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.590149 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.590159 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.590171 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.590180 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:25Z","lastTransitionTime":"2025-12-05T15:00:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.693948 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.694014 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.694035 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.694063 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.694084 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:25Z","lastTransitionTime":"2025-12-05T15:00:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.797257 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.797316 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.797336 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.797360 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.797377 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:25Z","lastTransitionTime":"2025-12-05T15:00:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.900153 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.900208 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.900224 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.900247 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:25 crc kubenswrapper[4840]: I1205 15:00:25.900264 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:25Z","lastTransitionTime":"2025-12-05T15:00:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.002813 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.003243 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.003412 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.003559 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.003677 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:26Z","lastTransitionTime":"2025-12-05T15:00:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.066146 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.066238 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:00:26 crc kubenswrapper[4840]: E1205 15:00:26.066271 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.066154 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 15:00:26 crc kubenswrapper[4840]: E1205 15:00:26.066372 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 15:00:26 crc kubenswrapper[4840]: E1205 15:00:26.066567 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.105563 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.105610 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.105619 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.105633 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.105642 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:26Z","lastTransitionTime":"2025-12-05T15:00:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.208396 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.208441 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.208452 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.208472 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.208483 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:26Z","lastTransitionTime":"2025-12-05T15:00:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.311342 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.311405 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.311428 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.311457 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.311478 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:26Z","lastTransitionTime":"2025-12-05T15:00:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.414393 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.414445 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.414456 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.414473 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.414486 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:26Z","lastTransitionTime":"2025-12-05T15:00:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.517015 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.517075 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.517092 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.517115 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.517134 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:26Z","lastTransitionTime":"2025-12-05T15:00:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.621405 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.621445 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.621456 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.621472 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.621482 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:26Z","lastTransitionTime":"2025-12-05T15:00:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.724311 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.724347 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.724358 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.724373 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.724384 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:26Z","lastTransitionTime":"2025-12-05T15:00:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.826456 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.826521 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.826543 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.826570 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.826593 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:26Z","lastTransitionTime":"2025-12-05T15:00:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.929505 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.929560 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.929576 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.929604 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:26 crc kubenswrapper[4840]: I1205 15:00:26.929625 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:26Z","lastTransitionTime":"2025-12-05T15:00:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.032339 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.032384 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.032399 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.032416 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.032428 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:27Z","lastTransitionTime":"2025-12-05T15:00:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.066650 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 15:00:27 crc kubenswrapper[4840]: E1205 15:00:27.066923 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.090229 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.098507 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.098555 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.098573 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.098595 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.098613 4840 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T15:00:27Z","lastTransitionTime":"2025-12-05T15:00:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.160422 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-xqxzv"] Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.160997 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xqxzv" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.163320 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.163545 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.163751 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.164468 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.196618 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=34.196598827 podStartE2EDuration="34.196598827s" podCreationTimestamp="2025-12-05 14:59:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:00:27.172755727 +0000 UTC m=+105.513818351" watchObservedRunningTime="2025-12-05 15:00:27.196598827 +0000 UTC m=+105.537661441" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.196732 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=0.196727711 podStartE2EDuration="196.727711ms" podCreationTimestamp="2025-12-05 15:00:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:00:27.195763933 +0000 UTC m=+105.536826547" watchObservedRunningTime="2025-12-05 15:00:27.196727711 +0000 UTC m=+105.537790325" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.214200 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=57.214164718 podStartE2EDuration="57.214164718s" podCreationTimestamp="2025-12-05 14:59:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:00:27.213137729 +0000 UTC m=+105.554200343" watchObservedRunningTime="2025-12-05 15:00:27.214164718 +0000 UTC m=+105.555227372" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.229600 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-xqxzv\" (UID: \"8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xqxzv" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.229674 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-xqxzv\" (UID: \"8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xqxzv" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 
15:00:27.229811 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-xqxzv\" (UID: \"8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xqxzv" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.229928 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a-service-ca\") pod \"cluster-version-operator-5c965bbfc6-xqxzv\" (UID: \"8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xqxzv" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.229995 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-xqxzv\" (UID: \"8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xqxzv" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.230691 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podStartSLOduration=86.230638228 podStartE2EDuration="1m26.230638228s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:00:27.227843548 +0000 UTC m=+105.568906192" watchObservedRunningTime="2025-12-05 15:00:27.230638228 +0000 UTC m=+105.571700882" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.287987 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-q8pn7" podStartSLOduration=86.287947123 podStartE2EDuration="1m26.287947123s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:00:27.286117871 +0000 UTC m=+105.627180525" watchObservedRunningTime="2025-12-05 15:00:27.287947123 +0000 UTC m=+105.629009767" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.330668 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-xqxzv\" (UID: \"8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xqxzv" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.330733 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-xqxzv\" (UID: \"8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xqxzv" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.330780 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-xqxzv\" (UID: \"8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xqxzv" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.330800 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-xqxzv\" (UID: \"8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xqxzv" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.330817 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a-service-ca\") pod \"cluster-version-operator-5c965bbfc6-xqxzv\" (UID: \"8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xqxzv" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.330988 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-xqxzv\" (UID: \"8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xqxzv" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.331093 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-xqxzv\" (UID: \"8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xqxzv" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.332103 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a-service-ca\") pod \"cluster-version-operator-5c965bbfc6-xqxzv\" (UID: \"8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xqxzv" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.340293 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-xqxzv\" (UID: \"8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xqxzv" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.351520 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-xqxzv\" (UID: \"8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xqxzv" Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.475670 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xqxzv" Dec 05 15:00:27 crc kubenswrapper[4840]: W1205 15:00:27.494013 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8a5d4696_efb3_461e_a9ef_a9c1f6fa3b7a.slice/crio-a5f639c931ccfc96a4702f195490c0cfe02fbfa437d1ab6d0144cabc266359fa WatchSource:0}: Error finding container a5f639c931ccfc96a4702f195490c0cfe02fbfa437d1ab6d0144cabc266359fa: Status 404 returned error can't find the container with id a5f639c931ccfc96a4702f195490c0cfe02fbfa437d1ab6d0144cabc266359fa Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.683597 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xqxzv" event={"ID":"8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a","Type":"ContainerStarted","Data":"9c74d3d76e75e4b01d71a8ec4a8c4fa3c0e57cb5fcc5e6ca1f8f21cbea025281"} Dec 05 15:00:27 crc kubenswrapper[4840]: I1205 15:00:27.683637 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xqxzv" event={"ID":"8a5d4696-efb3-461e-a9ef-a9c1f6fa3b7a","Type":"ContainerStarted","Data":"a5f639c931ccfc96a4702f195490c0cfe02fbfa437d1ab6d0144cabc266359fa"} Dec 05 15:00:28 crc kubenswrapper[4840]: I1205 15:00:28.066340 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 15:00:28 crc kubenswrapper[4840]: I1205 15:00:28.066419 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 15:00:28 crc kubenswrapper[4840]: E1205 15:00:28.066459 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 15:00:28 crc kubenswrapper[4840]: E1205 15:00:28.066563 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 15:00:28 crc kubenswrapper[4840]: I1205 15:00:28.067035 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:00:28 crc kubenswrapper[4840]: E1205 15:00:28.067160 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 15:00:29 crc kubenswrapper[4840]: I1205 15:00:29.066336 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 15:00:29 crc kubenswrapper[4840]: E1205 15:00:29.066529 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 15:00:30 crc kubenswrapper[4840]: I1205 15:00:30.066490 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 15:00:30 crc kubenswrapper[4840]: I1205 15:00:30.066606 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 15:00:30 crc kubenswrapper[4840]: I1205 15:00:30.066527 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:00:30 crc kubenswrapper[4840]: E1205 15:00:30.066686 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 15:00:30 crc kubenswrapper[4840]: E1205 15:00:30.066742 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 15:00:30 crc kubenswrapper[4840]: E1205 15:00:30.066944 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 15:00:31 crc kubenswrapper[4840]: I1205 15:00:31.066096 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 15:00:31 crc kubenswrapper[4840]: E1205 15:00:31.066215 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 15:00:32 crc kubenswrapper[4840]: I1205 15:00:32.066724 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 15:00:32 crc kubenswrapper[4840]: I1205 15:00:32.066734 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:00:32 crc kubenswrapper[4840]: E1205 15:00:32.067730 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 15:00:32 crc kubenswrapper[4840]: I1205 15:00:32.067745 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 15:00:32 crc kubenswrapper[4840]: E1205 15:00:32.067832 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 15:00:32 crc kubenswrapper[4840]: E1205 15:00:32.067900 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 15:00:32 crc kubenswrapper[4840]: I1205 15:00:32.068554 4840 scope.go:117] "RemoveContainer" containerID="138309b3756a49d9260dc20eccf20c12afdf023af9a1e7ce18c2e3211f84e616" Dec 05 15:00:32 crc kubenswrapper[4840]: E1205 15:00:32.068726 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-czvxk_openshift-ovn-kubernetes(e9a432c2-725d-46c6-963e-68a99ba35c89)\"" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" Dec 05 15:00:33 crc kubenswrapper[4840]: I1205 15:00:33.067784 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 15:00:33 crc kubenswrapper[4840]: E1205 15:00:33.067956 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 15:00:34 crc kubenswrapper[4840]: I1205 15:00:34.066806 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:00:34 crc kubenswrapper[4840]: E1205 15:00:34.066961 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 15:00:34 crc kubenswrapper[4840]: I1205 15:00:34.067211 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 15:00:34 crc kubenswrapper[4840]: I1205 15:00:34.067262 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 15:00:34 crc kubenswrapper[4840]: E1205 15:00:34.067345 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 15:00:34 crc kubenswrapper[4840]: E1205 15:00:34.067621 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 15:00:35 crc kubenswrapper[4840]: I1205 15:00:35.066235 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 15:00:35 crc kubenswrapper[4840]: E1205 15:00:35.066417 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 15:00:36 crc kubenswrapper[4840]: I1205 15:00:36.066262 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 15:00:36 crc kubenswrapper[4840]: E1205 15:00:36.066376 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 15:00:36 crc kubenswrapper[4840]: I1205 15:00:36.066285 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:00:36 crc kubenswrapper[4840]: E1205 15:00:36.066442 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 15:00:36 crc kubenswrapper[4840]: I1205 15:00:36.066261 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 15:00:36 crc kubenswrapper[4840]: E1205 15:00:36.066484 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 15:00:36 crc kubenswrapper[4840]: I1205 15:00:36.711046 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-q8pn7_ffd91a64-4156-418d-8348-1efa3563e904/kube-multus/1.log" Dec 05 15:00:36 crc kubenswrapper[4840]: I1205 15:00:36.711579 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-q8pn7_ffd91a64-4156-418d-8348-1efa3563e904/kube-multus/0.log" Dec 05 15:00:36 crc kubenswrapper[4840]: I1205 15:00:36.711733 4840 generic.go:334] "Generic (PLEG): container finished" podID="ffd91a64-4156-418d-8348-1efa3563e904" containerID="ddb0c1f54c32a87c863028965a174607b182b1a1ae7e681045ad724e50e0d7d8" exitCode=1 Dec 05 15:00:36 crc kubenswrapper[4840]: I1205 15:00:36.711796 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-q8pn7" event={"ID":"ffd91a64-4156-418d-8348-1efa3563e904","Type":"ContainerDied","Data":"ddb0c1f54c32a87c863028965a174607b182b1a1ae7e681045ad724e50e0d7d8"} Dec 05 15:00:36 crc kubenswrapper[4840]: I1205 15:00:36.712027 4840 scope.go:117] "RemoveContainer" containerID="c7055e65636d5d4ea803c2a494ad564934e04c2064ff7561c9a2141b26296691" Dec 05 15:00:36 crc kubenswrapper[4840]: I1205 15:00:36.712524 4840 scope.go:117] "RemoveContainer" containerID="ddb0c1f54c32a87c863028965a174607b182b1a1ae7e681045ad724e50e0d7d8" Dec 05 15:00:36 crc kubenswrapper[4840]: E1205 15:00:36.712781 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-q8pn7_openshift-multus(ffd91a64-4156-418d-8348-1efa3563e904)\"" pod="openshift-multus/multus-q8pn7" podUID="ffd91a64-4156-418d-8348-1efa3563e904" Dec 05 15:00:36 crc kubenswrapper[4840]: I1205 15:00:36.735761 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xqxzv" podStartSLOduration=95.735741668 podStartE2EDuration="1m35.735741668s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:00:27.704586629 +0000 UTC m=+106.045649243" 
watchObservedRunningTime="2025-12-05 15:00:36.735741668 +0000 UTC m=+115.076804282" Dec 05 15:00:37 crc kubenswrapper[4840]: I1205 15:00:37.066140 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 15:00:37 crc kubenswrapper[4840]: E1205 15:00:37.066385 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 15:00:37 crc kubenswrapper[4840]: I1205 15:00:37.716314 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-q8pn7_ffd91a64-4156-418d-8348-1efa3563e904/kube-multus/1.log" Dec 05 15:00:38 crc kubenswrapper[4840]: I1205 15:00:38.066001 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:00:38 crc kubenswrapper[4840]: I1205 15:00:38.066034 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 15:00:38 crc kubenswrapper[4840]: I1205 15:00:38.066138 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 15:00:38 crc kubenswrapper[4840]: E1205 15:00:38.066129 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 15:00:38 crc kubenswrapper[4840]: E1205 15:00:38.067246 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 15:00:38 crc kubenswrapper[4840]: E1205 15:00:38.067474 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 15:00:39 crc kubenswrapper[4840]: I1205 15:00:39.066702 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 15:00:39 crc kubenswrapper[4840]: E1205 15:00:39.066905 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 15:00:40 crc kubenswrapper[4840]: I1205 15:00:40.065691 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 15:00:40 crc kubenswrapper[4840]: I1205 15:00:40.065820 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:00:40 crc kubenswrapper[4840]: E1205 15:00:40.065933 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 15:00:40 crc kubenswrapper[4840]: E1205 15:00:40.066078 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 15:00:40 crc kubenswrapper[4840]: I1205 15:00:40.066227 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 15:00:40 crc kubenswrapper[4840]: E1205 15:00:40.066418 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 15:00:41 crc kubenswrapper[4840]: I1205 15:00:41.065980 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 15:00:41 crc kubenswrapper[4840]: E1205 15:00:41.066218 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 15:00:42 crc kubenswrapper[4840]: I1205 15:00:42.066689 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 15:00:42 crc kubenswrapper[4840]: I1205 15:00:42.066768 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:00:42 crc kubenswrapper[4840]: I1205 15:00:42.069662 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 15:00:42 crc kubenswrapper[4840]: E1205 15:00:42.069817 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 15:00:42 crc kubenswrapper[4840]: E1205 15:00:42.069914 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 15:00:42 crc kubenswrapper[4840]: E1205 15:00:42.069623 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 15:00:42 crc kubenswrapper[4840]: E1205 15:00:42.080379 4840 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 05 15:00:42 crc kubenswrapper[4840]: E1205 15:00:42.162356 4840 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 15:00:43 crc kubenswrapper[4840]: I1205 15:00:43.066502 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 15:00:43 crc kubenswrapper[4840]: E1205 15:00:43.067293 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 15:00:44 crc kubenswrapper[4840]: I1205 15:00:44.065974 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 15:00:44 crc kubenswrapper[4840]: E1205 15:00:44.066150 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 15:00:44 crc kubenswrapper[4840]: I1205 15:00:44.066350 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 15:00:44 crc kubenswrapper[4840]: I1205 15:00:44.066564 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:00:44 crc kubenswrapper[4840]: E1205 15:00:44.066668 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 15:00:44 crc kubenswrapper[4840]: E1205 15:00:44.066943 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 15:00:45 crc kubenswrapper[4840]: I1205 15:00:45.660005 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 15:00:45 crc kubenswrapper[4840]: I1205 15:00:45.660053 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 15:00:45 crc kubenswrapper[4840]: I1205 15:00:45.660184 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:00:45 crc kubenswrapper[4840]: I1205 15:00:45.660191 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 15:00:45 crc kubenswrapper[4840]: E1205 15:00:45.660359 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 15:00:45 crc kubenswrapper[4840]: E1205 15:00:45.660560 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 15:00:45 crc kubenswrapper[4840]: E1205 15:00:45.660674 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 15:00:45 crc kubenswrapper[4840]: E1205 15:00:45.661149 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 15:00:47 crc kubenswrapper[4840]: I1205 15:00:47.066147 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 15:00:47 crc kubenswrapper[4840]: I1205 15:00:47.066204 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 15:00:47 crc kubenswrapper[4840]: E1205 15:00:47.066335 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 15:00:47 crc kubenswrapper[4840]: E1205 15:00:47.066457 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f" Dec 05 15:00:47 crc kubenswrapper[4840]: I1205 15:00:47.068074 4840 scope.go:117] "RemoveContainer" containerID="138309b3756a49d9260dc20eccf20c12afdf023af9a1e7ce18c2e3211f84e616" Dec 05 15:00:47 crc kubenswrapper[4840]: E1205 15:00:47.164130 4840 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Dec 05 15:00:47 crc kubenswrapper[4840]: I1205 15:00:47.751999 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-czvxk_e9a432c2-725d-46c6-963e-68a99ba35c89/ovnkube-controller/3.log"
Dec 05 15:00:47 crc kubenswrapper[4840]: I1205 15:00:47.755724 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerStarted","Data":"2144a37938b23a0d2c63e5ce547c2a3b5eb0cc5976c9125c6788c1bbed102368"}
Dec 05 15:00:47 crc kubenswrapper[4840]: I1205 15:00:47.756305 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk"
Dec 05 15:00:47 crc kubenswrapper[4840]: I1205 15:00:47.928002 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" podStartSLOduration=106.92797304 podStartE2EDuration="1m46.92797304s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:00:47.782434258 +0000 UTC m=+126.123496872" watchObservedRunningTime="2025-12-05 15:00:47.92797304 +0000 UTC m=+126.269035654"
Dec 05 15:00:47 crc kubenswrapper[4840]: I1205 15:00:47.929041 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-gn7qq"]
Dec 05 15:00:47 crc kubenswrapper[4840]: I1205 15:00:47.929134 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq"
Dec 05 15:00:47 crc kubenswrapper[4840]: E1205 15:00:47.929243 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f"
Dec 05 15:00:48 crc kubenswrapper[4840]: I1205 15:00:48.066339 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 15:00:48 crc kubenswrapper[4840]: E1205 15:00:48.066553 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 15:00:48 crc kubenswrapper[4840]: I1205 15:00:48.066590 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 15:00:48 crc kubenswrapper[4840]: E1205 15:00:48.066671 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 15:00:48 crc kubenswrapper[4840]: I1205 15:00:48.067009 4840 scope.go:117] "RemoveContainer" containerID="ddb0c1f54c32a87c863028965a174607b182b1a1ae7e681045ad724e50e0d7d8"
Dec 05 15:00:48 crc kubenswrapper[4840]: I1205 15:00:48.760307 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-q8pn7_ffd91a64-4156-418d-8348-1efa3563e904/kube-multus/1.log"
Dec 05 15:00:48 crc kubenswrapper[4840]: I1205 15:00:48.761016 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-q8pn7" event={"ID":"ffd91a64-4156-418d-8348-1efa3563e904","Type":"ContainerStarted","Data":"83f6a1b848d66de940665d1fced2d72d3aad317913ba5ab4c94c1951388c49cb"}
Dec 05 15:00:49 crc kubenswrapper[4840]: I1205 15:00:49.066556 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq"
Dec 05 15:00:49 crc kubenswrapper[4840]: I1205 15:00:49.066554 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 15:00:49 crc kubenswrapper[4840]: E1205 15:00:49.066768 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f"
Dec 05 15:00:49 crc kubenswrapper[4840]: E1205 15:00:49.066903 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 15:00:50 crc kubenswrapper[4840]: I1205 15:00:50.066321 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 15:00:50 crc kubenswrapper[4840]: I1205 15:00:50.066373 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 15:00:50 crc kubenswrapper[4840]: E1205 15:00:50.066554 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 15:00:50 crc kubenswrapper[4840]: E1205 15:00:50.066684 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 15:00:51 crc kubenswrapper[4840]: I1205 15:00:51.066360 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 15:00:51 crc kubenswrapper[4840]: E1205 15:00:51.066551 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 15:00:51 crc kubenswrapper[4840]: I1205 15:00:51.066357 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq"
Dec 05 15:00:51 crc kubenswrapper[4840]: E1205 15:00:51.066698 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gn7qq" podUID="f5cf5212-af00-4788-ad5f-ff824fea7c0f"
Dec 05 15:00:52 crc kubenswrapper[4840]: I1205 15:00:52.065840 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 15:00:52 crc kubenswrapper[4840]: I1205 15:00:52.065936 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 15:00:52 crc kubenswrapper[4840]: E1205 15:00:52.068914 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 15:00:52 crc kubenswrapper[4840]: E1205 15:00:52.069013 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 15:00:53 crc kubenswrapper[4840]: I1205 15:00:53.066157 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 15:00:53 crc kubenswrapper[4840]: I1205 15:00:53.066203 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq"
Dec 05 15:00:53 crc kubenswrapper[4840]: I1205 15:00:53.068825 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Dec 05 15:00:53 crc kubenswrapper[4840]: I1205 15:00:53.068931 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Dec 05 15:00:53 crc kubenswrapper[4840]: I1205 15:00:53.069122 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Dec 05 15:00:53 crc kubenswrapper[4840]: I1205 15:00:53.070573 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Dec 05 15:00:54 crc kubenswrapper[4840]: I1205 15:00:54.065767 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 15:00:54 crc kubenswrapper[4840]: I1205 15:00:54.065793 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 15:00:54 crc kubenswrapper[4840]: I1205 15:00:54.068074 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Dec 05 15:00:54 crc kubenswrapper[4840]: I1205 15:00:54.068074 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.517311 4840 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.550718 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-8t4w9"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.551112 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.554984 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.560744 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.560761 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.560974 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.561526 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.563806 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.564156 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.564763 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.565250 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-4bdzq"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.565798 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4bdzq"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.566054 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.566207 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-g59rm"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.566674 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-g59rm"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.567029 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.567225 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.567476 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.567545 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-9lj8m"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.568052 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-9lj8m"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.568617 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-hvqs4"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.568947 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-hvqs4"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.569183 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.569216 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.570298 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tqkgt"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.571033 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.571196 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tqkgt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.571683 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j5lpd"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.572342 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j5lpd"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.572741 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-svf7z"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.573028 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-svf7z"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.573427 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-rbbsb"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.573837 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-rbbsb"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.574221 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.574358 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.574389 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.574425 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.574700 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.576028 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.576150 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.576327 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.576485 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-pczb2"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.576542 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.576156 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.576609 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.576797 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.577107 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pczb2"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.577589 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.577671 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.577858 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.577971 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.577674 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.578081 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.578181 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.578311 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.578422 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.578504 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.578592 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.578637 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.578732 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.578803 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.578823 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.578834 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.578933 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.579517 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zsj6v"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.579730 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.579911 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.580558 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zsj6v"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.581716 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvb2t\" (UniqueName: \"kubernetes.io/projected/6e933683-f464-453c-8228-97b31d8b1f42-kube-api-access-mvb2t\") pod \"controller-manager-879f6c89f-8t4w9\" (UID: \"6e933683-f464-453c-8228-97b31d8b1f42\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.581760 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6e933683-f464-453c-8228-97b31d8b1f42-client-ca\") pod \"controller-manager-879f6c89f-8t4w9\" (UID: \"6e933683-f464-453c-8228-97b31d8b1f42\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.581782 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6e933683-f464-453c-8228-97b31d8b1f42-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-8t4w9\" (UID: \"6e933683-f464-453c-8228-97b31d8b1f42\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.581805 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e933683-f464-453c-8228-97b31d8b1f42-serving-cert\") pod \"controller-manager-879f6c89f-8t4w9\" (UID: \"6e933683-f464-453c-8228-97b31d8b1f42\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.581833 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e933683-f464-453c-8228-97b31d8b1f42-config\") pod \"controller-manager-879f6c89f-8t4w9\" (UID: \"6e933683-f464-453c-8228-97b31d8b1f42\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.582150 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-7t8bh"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.583124 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-7t8bh"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.586948 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.593892 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hr672"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.594389 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hr672"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.594887 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.595034 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.595095 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.595204 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.595367 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.595487 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.595564 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.595617 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.604684 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.604995 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.605076 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.605712 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.606184 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.606744 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.607527 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.607662 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.632288 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.632230 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.632445 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.632713 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.633981 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.634010 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.636038 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.636414 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.637052 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.637447 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.638715 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.639666 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xsrwv"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.640095 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-qskjn"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.640408 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-zb8r7"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.640601 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.640817 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-zb8r7"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.641733 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.642270 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.645076 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.645582 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.650229 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.651093 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.651704 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.652060 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-qskjn"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.652063 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.652651 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.652792 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.653070 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.653195 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.655686 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.662025 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.662200 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.662442 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.662456 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205
15:00:57.662706 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.664017 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.664401 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.664483 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.670367 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-8t4w9"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.671196 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.671310 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.671614 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-xrctg"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.672032 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.672213 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.672238 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2fvvn"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.672379 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.672583 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.672589 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2fvvn" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.672725 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.672788 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xrctg" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.672908 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.673066 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.673109 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.676480 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.678657 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.681482 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-qrgwq"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.683742 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nzmkn"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.684112 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-qrgwq" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.684375 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/7b21b2d6-8970-40a9-ad70-4ef9fed68b72-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-hr672\" (UID: \"7b21b2d6-8970-40a9-ad70-4ef9fed68b72\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hr672" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.684420 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e933683-f464-453c-8228-97b31d8b1f42-config\") pod \"controller-manager-879f6c89f-8t4w9\" (UID: \"6e933683-f464-453c-8228-97b31d8b1f42\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.684461 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66dgq\" (UniqueName: \"kubernetes.io/projected/80ec2d65-850c-4134-b5cd-f763de4964fb-kube-api-access-66dgq\") pod \"cluster-samples-operator-665b6dd947-tqkgt\" (UID: \"80ec2d65-850c-4134-b5cd-f763de4964fb\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tqkgt" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.693081 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-589pg"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.694143 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/80ec2d65-850c-4134-b5cd-f763de4964fb-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-tqkgt\" (UID: \"80ec2d65-850c-4134-b5cd-f763de4964fb\") " 
pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tqkgt" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.694224 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvb2t\" (UniqueName: \"kubernetes.io/projected/6e933683-f464-453c-8228-97b31d8b1f42-kube-api-access-mvb2t\") pod \"controller-manager-879f6c89f-8t4w9\" (UID: \"6e933683-f464-453c-8228-97b31d8b1f42\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.694274 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwgsz\" (UniqueName: \"kubernetes.io/projected/7b21b2d6-8970-40a9-ad70-4ef9fed68b72-kube-api-access-dwgsz\") pod \"cluster-image-registry-operator-dc59b4c8b-hr672\" (UID: \"7b21b2d6-8970-40a9-ad70-4ef9fed68b72\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hr672" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.694300 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7b21b2d6-8970-40a9-ad70-4ef9fed68b72-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-hr672\" (UID: \"7b21b2d6-8970-40a9-ad70-4ef9fed68b72\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hr672" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.694329 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7b21b2d6-8970-40a9-ad70-4ef9fed68b72-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-hr672\" (UID: \"7b21b2d6-8970-40a9-ad70-4ef9fed68b72\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hr672" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.694357 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6e933683-f464-453c-8228-97b31d8b1f42-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-8t4w9\" (UID: \"6e933683-f464-453c-8228-97b31d8b1f42\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.694378 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6e933683-f464-453c-8228-97b31d8b1f42-client-ca\") pod \"controller-manager-879f6c89f-8t4w9\" (UID: \"6e933683-f464-453c-8228-97b31d8b1f42\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.694416 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e933683-f464-453c-8228-97b31d8b1f42-serving-cert\") pod \"controller-manager-879f6c89f-8t4w9\" (UID: \"6e933683-f464-453c-8228-97b31d8b1f42\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.694976 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-589pg" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.695042 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nzmkn" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.695808 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6e933683-f464-453c-8228-97b31d8b1f42-client-ca\") pod \"controller-manager-879f6c89f-8t4w9\" (UID: \"6e933683-f464-453c-8228-97b31d8b1f42\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.697279 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.698625 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jh844"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.699148 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e933683-f464-453c-8228-97b31d8b1f42-config\") pod \"controller-manager-879f6c89f-8t4w9\" (UID: \"6e933683-f464-453c-8228-97b31d8b1f42\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.699519 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6e933683-f464-453c-8228-97b31d8b1f42-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-8t4w9\" (UID: \"6e933683-f464-453c-8228-97b31d8b1f42\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.700723 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jh844" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.715562 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-mp7c5"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.716612 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e933683-f464-453c-8228-97b31d8b1f42-serving-cert\") pod \"controller-manager-879f6c89f-8t4w9\" (UID: \"6e933683-f464-453c-8228-97b31d8b1f42\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.717533 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-mp7c5" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.719048 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-htv8l"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.719610 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-htv8l" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.721218 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-69w55"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.721464 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.722098 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69w55" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.723951 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8vzfs"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.724502 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-fjcvg"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.725010 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fjcvg" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.725190 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8vzfs" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.726191 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-7z87w"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.726925 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-7z87w" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.727155 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.727923 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j5lpd"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.729063 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-g59rm"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.730139 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nc87t"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.730654 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nc87t" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.730992 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415780-k44mq"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.731627 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415780-k44mq" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.733601 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.733726 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-7t8bh"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.734922 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-r9h7q"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.735664 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7vcg4"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.735666 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-r9h7q" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.738991 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-9lj8m"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.739024 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7bdp5"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.739375 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7vcg4" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.739453 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-rbbsb"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.739535 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7bdp5" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.740352 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-hvqs4"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.742458 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tn7k5"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.742953 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tn7k5" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.743422 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x52gm"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.744042 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x52gm" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.744429 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-pw7gj"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.744757 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-pw7gj" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.745465 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-48665"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.745925 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-48665" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.746884 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-pczb2"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.747748 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jh844"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.748813 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zsj6v"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.750743 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hr672"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.753733 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nzmkn"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.754258 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.754335 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.755625 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tqkgt"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.756877 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xsrwv"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.757927 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-fjcvg"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.758964 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2fvvn"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.760322 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8vzfs"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.761538 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tn7k5"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.762981 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-mp7c5"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.764361 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-svf7z"] Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.766088 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-r9h7q"] 
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.766736 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-zb8r7"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.768239 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-589pg"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.780230 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.792536 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7vcg4"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.794242 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.795058 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/80ec2d65-850c-4134-b5cd-f763de4964fb-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-tqkgt\" (UID: \"80ec2d65-850c-4134-b5cd-f763de4964fb\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tqkgt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.795207 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwgsz\" (UniqueName: \"kubernetes.io/projected/7b21b2d6-8970-40a9-ad70-4ef9fed68b72-kube-api-access-dwgsz\") pod \"cluster-image-registry-operator-dc59b4c8b-hr672\" (UID: \"7b21b2d6-8970-40a9-ad70-4ef9fed68b72\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hr672"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.795238 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7b21b2d6-8970-40a9-ad70-4ef9fed68b72-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-hr672\" (UID: \"7b21b2d6-8970-40a9-ad70-4ef9fed68b72\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hr672"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.795261 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7b21b2d6-8970-40a9-ad70-4ef9fed68b72-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-hr672\" (UID: \"7b21b2d6-8970-40a9-ad70-4ef9fed68b72\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hr672"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.795312 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/7b21b2d6-8970-40a9-ad70-4ef9fed68b72-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-hr672\" (UID: \"7b21b2d6-8970-40a9-ad70-4ef9fed68b72\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hr672"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.795355 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66dgq\" (UniqueName: \"kubernetes.io/projected/80ec2d65-850c-4134-b5cd-f763de4964fb-kube-api-access-66dgq\") pod \"cluster-samples-operator-665b6dd947-tqkgt\" (UID: \"80ec2d65-850c-4134-b5cd-f763de4964fb\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tqkgt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.798017 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7b21b2d6-8970-40a9-ad70-4ef9fed68b72-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-hr672\" (UID: \"7b21b2d6-8970-40a9-ad70-4ef9fed68b72\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hr672"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.800570 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-xrctg"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.800981 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/80ec2d65-850c-4134-b5cd-f763de4964fb-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-tqkgt\" (UID: \"80ec2d65-850c-4134-b5cd-f763de4964fb\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tqkgt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.803265 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-qskjn"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.805404 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-htv8l"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.807422 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415780-k44mq"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.810621 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/7b21b2d6-8970-40a9-ad70-4ef9fed68b72-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-hr672\" (UID: \"7b21b2d6-8970-40a9-ad70-4ef9fed68b72\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hr672"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.811635 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-69w55"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.813225 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.813380 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7bdp5"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.814956 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-7z87w"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.816619 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x52gm"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.818011 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-zq9dg"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.819031 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-zq9dg"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.821445 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-fnhqw"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.822734 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-fnhqw"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.823418 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-48665"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.824634 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nc87t"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.825647 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-pw7gj"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.827000 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-fnhqw"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.827720 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-5t5n2"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.828601 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-5t5n2"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.828722 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-5t5n2"]
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.833645 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.853977 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.875109 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.913324 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.933451 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.953435 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.973770 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Dec 05 15:00:57 crc kubenswrapper[4840]: I1205 15:00:57.993457 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.013151 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.034090 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.053165 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.072742 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.093426 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.113159 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.134306 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.153771 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.174086 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.209362 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvb2t\" (UniqueName: \"kubernetes.io/projected/6e933683-f464-453c-8228-97b31d8b1f42-kube-api-access-mvb2t\") pod \"controller-manager-879f6c89f-8t4w9\" (UID: \"6e933683-f464-453c-8228-97b31d8b1f42\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.214996 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.234063 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.254100 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.274060 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.294044 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.314346 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.333479 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.354740 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.393105 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.419497 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.433762 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.452936 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.473795 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.482413 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.493878 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.514448 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.534333 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.554197 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.587503 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.593957 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.613087 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.634362 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.653548 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.672425 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-8t4w9"]
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.673058 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.692660 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.713502 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.731675 4840 request.go:700] Waited for 1.009206942s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dmachine-config-operator-dockercfg-98p87&limit=500&resourceVersion=0
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.736939 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.752854 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.773416 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.793018 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.798198 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9" event={"ID":"6e933683-f464-453c-8228-97b31d8b1f42","Type":"ContainerStarted","Data":"3346da2f7595200e63558e8a3ac3cf474a9af10b349135903a722f4f4d9083ef"}
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.813599 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.833442 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.855168 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.873489 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.893755 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.912788 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.932859 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.954072 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Dec 05 15:00:58 crc kubenswrapper[4840]: I1205 15:00:58.973478 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.000049 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.013148 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.033976 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.053223 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.074259 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.093788 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.113688 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.134013 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.152895 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.173718 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.194270 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.214269 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.234007 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.253341 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.273483 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.293622 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.313465 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.333209 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.352661 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.374004 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.393278 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.413821 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.434217 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.454041 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.491047 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66dgq\" (UniqueName: \"kubernetes.io/projected/80ec2d65-850c-4134-b5cd-f763de4964fb-kube-api-access-66dgq\") pod \"cluster-samples-operator-665b6dd947-tqkgt\" (UID: \"80ec2d65-850c-4134-b5cd-f763de4964fb\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tqkgt"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.514680 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7b21b2d6-8970-40a9-ad70-4ef9fed68b72-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-hr672\" (UID: \"7b21b2d6-8970-40a9-ad70-4ef9fed68b72\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hr672"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.530071 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tqkgt"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.534655 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.535048 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwgsz\" (UniqueName: \"kubernetes.io/projected/7b21b2d6-8970-40a9-ad70-4ef9fed68b72-kube-api-access-dwgsz\") pod \"cluster-image-registry-operator-dc59b4c8b-hr672\" (UID: \"7b21b2d6-8970-40a9-ad70-4ef9fed68b72\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hr672"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.554567 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.573678 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.593957 4840 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.615847 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.630570 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hr672"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.634464 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.653952 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.674422 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.693301 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.702932 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tqkgt"]
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.732136 4840 request.go:700] Waited for 1.35108457s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/cluster-image-registry-operator-dc59b4c8b-hr672
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.798617 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hr672"]
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.992097 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/71224ef1-9751-49f0-89d6-18b5225f97cb-registry-tls\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:00:59 crc kubenswrapper[4840]: I1205 15:00:59.992215 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:00:59 crc kubenswrapper[4840]: E1205 15:00:59.992633 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:00.492610625 +0000 UTC m=+138.833673269 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.097085 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.097510 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4692b73d-a699-4ef5-82e8-cac30360e1b3-encryption-config\") pod \"apiserver-7bbb656c7d-d4nr2\" (UID: \"4692b73d-a699-4ef5-82e8-cac30360e1b3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.097605 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/96c3620f-2835-41cc-8152-40ff0eb6db8c-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-hvqs4\" (UID: \"96c3620f-2835-41cc-8152-40ff0eb6db8c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hvqs4"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.097726 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d87a8800-e3bd-4e0b-89c4-fe73193110fb-etcd-client\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.097844 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/811f6598-f603-4a15-8dec-add067d82d5c-audit-dir\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.097922 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/87967d07-5c59-4d94-a9c4-1a9f1058e0a2-auth-proxy-config\") pod \"machine-approver-56656f9798-4bdzq\" (UID: \"87967d07-5c59-4d94-a9c4-1a9f1058e0a2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4bdzq"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.097942 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/8a02be23-a9cd-4ea6-9b6f-9e383856cb1d-signing-cabundle\") pod \"service-ca-9c57cc56f-r9h7q\" (UID: \"8a02be23-a9cd-4ea6-9b6f-9e383856cb1d\") " pod="openshift-service-ca/service-ca-9c57cc56f-r9h7q"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.098013 4840 reconciler_common.go:245]
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/da0816a7-7a6a-40ac-a63a-3c26278426d8-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-zsj6v\" (UID: \"da0816a7-7a6a-40ac-a63a-3c26278426d8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zsj6v" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.098041 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f9f44648-d5f1-49ee-a394-115e43c97fc9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nc87t\" (UID: \"f9f44648-d5f1-49ee-a394-115e43c97fc9\") " pod="openshift-marketplace/marketplace-operator-79b997595-nc87t" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.098126 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsc77\" (UniqueName: \"kubernetes.io/projected/87967d07-5c59-4d94-a9c4-1a9f1058e0a2-kube-api-access-xsc77\") pod \"machine-approver-56656f9798-4bdzq\" (UID: \"87967d07-5c59-4d94-a9c4-1a9f1058e0a2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4bdzq" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.098145 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7jnz\" (UniqueName: \"kubernetes.io/projected/d6391e1f-a04c-44ae-9528-b80530aa1d4a-kube-api-access-w7jnz\") pod \"catalog-operator-68c6474976-7vcg4\" (UID: \"d6391e1f-a04c-44ae-9528-b80530aa1d4a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7vcg4" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.098203 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/891a06ac-8101-4fab-a947-2adf9d8eeb7f-images\") pod \"machine-api-operator-5694c8668f-9lj8m\" (UID: \"891a06ac-8101-4fab-a947-2adf9d8eeb7f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9lj8m" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.098286 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/362d5a3b-2574-499c-8142-0f3e4369f573-etcd-ca\") pod \"etcd-operator-b45778765-qskjn\" (UID: \"362d5a3b-2574-499c-8142-0f3e4369f573\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qskjn" Dec 05 15:01:00 crc kubenswrapper[4840]: E1205 15:01:00.098311 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:00.598281413 +0000 UTC m=+138.939344027 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.098341 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.098368 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/d87a8800-e3bd-4e0b-89c4-fe73193110fb-audit\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.098479 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d87a8800-e3bd-4e0b-89c4-fe73193110fb-audit-dir\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.098582 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3d9897dc-10eb-4947-b5ce-63362338dfd1-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-fjcvg\" (UID: \"3d9897dc-10eb-4947-b5ce-63362338dfd1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fjcvg" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.098653 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/71224ef1-9751-49f0-89d6-18b5225f97cb-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.098678 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4692b73d-a699-4ef5-82e8-cac30360e1b3-audit-dir\") pod \"apiserver-7bbb656c7d-d4nr2\" (UID: \"4692b73d-a699-4ef5-82e8-cac30360e1b3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.098789 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6-serving-cert\") pod \"route-controller-manager-6576b87f9c-tq2cv\" (UID: \"6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 
15:01:00.098817 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/237e4a75-4edd-4622-87f4-03a1f620649d-available-featuregates\") pod \"openshift-config-operator-7777fb866f-pczb2\" (UID: \"237e4a75-4edd-4622-87f4-03a1f620649d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pczb2" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.098852 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96c3620f-2835-41cc-8152-40ff0eb6db8c-config\") pod \"authentication-operator-69f744f599-hvqs4\" (UID: \"96c3620f-2835-41cc-8152-40ff0eb6db8c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hvqs4" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.099026 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qc27f\" (UniqueName: \"kubernetes.io/projected/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6-kube-api-access-qc27f\") pod \"route-controller-manager-6576b87f9c-tq2cv\" (UID: \"6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.099085 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7acc3cd6-974f-4584-ac4d-d0e443052796-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-htv8l\" (UID: \"7acc3cd6-974f-4584-ac4d-d0e443052796\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-htv8l" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.099111 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/7102c26c-508b-485c-8a0e-8d35333641ce-certs\") pod \"machine-config-server-zq9dg\" (UID: \"7102c26c-508b-485c-8a0e-8d35333641ce\") " pod="openshift-machine-config-operator/machine-config-server-zq9dg" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.099156 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cn9fh\" (UniqueName: \"kubernetes.io/projected/237e4a75-4edd-4622-87f4-03a1f620649d-kube-api-access-cn9fh\") pod \"openshift-config-operator-7777fb866f-pczb2\" (UID: \"237e4a75-4edd-4622-87f4-03a1f620649d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pczb2" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.099190 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lr8jp\" (UniqueName: \"kubernetes.io/projected/f9f44648-d5f1-49ee-a394-115e43c97fc9-kube-api-access-lr8jp\") pod \"marketplace-operator-79b997595-nc87t\" (UID: \"f9f44648-d5f1-49ee-a394-115e43c97fc9\") " pod="openshift-marketplace/marketplace-operator-79b997595-nc87t" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.099214 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8fa1328-3653-44b4-803e-e9c41249bc4f-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-589pg\" (UID: \"d8fa1328-3653-44b4-803e-e9c41249bc4f\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-589pg" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.099244 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/811f6598-f603-4a15-8dec-add067d82d5c-audit-policies\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.099267 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3d9897dc-10eb-4947-b5ce-63362338dfd1-proxy-tls\") pod \"machine-config-controller-84d6567774-fjcvg\" (UID: \"3d9897dc-10eb-4947-b5ce-63362338dfd1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fjcvg" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.099299 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/71224ef1-9751-49f0-89d6-18b5225f97cb-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.099320 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d87a8800-e3bd-4e0b-89c4-fe73193110fb-node-pullsecrets\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.099342 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d87a8800-e3bd-4e0b-89c4-fe73193110fb-config\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.099389 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.099409 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a5f30d9-fc2d-47dc-8662-d649023f9521-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nzmkn\" (UID: \"3a5f30d9-fc2d-47dc-8662-d649023f9521\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nzmkn" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.099441 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/891a06ac-8101-4fab-a947-2adf9d8eeb7f-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9lj8m\" (UID: \"891a06ac-8101-4fab-a947-2adf9d8eeb7f\") " 
pod="openshift-machine-api/machine-api-operator-5694c8668f-9lj8m" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.099515 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/71224ef1-9751-49f0-89d6-18b5225f97cb-trusted-ca\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.099548 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.099568 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3a5f30d9-fc2d-47dc-8662-d649023f9521-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nzmkn\" (UID: \"3a5f30d9-fc2d-47dc-8662-d649023f9521\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nzmkn" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.099588 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4692b73d-a699-4ef5-82e8-cac30360e1b3-etcd-client\") pod \"apiserver-7bbb656c7d-d4nr2\" (UID: \"4692b73d-a699-4ef5-82e8-cac30360e1b3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.099610 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.099629 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d87a8800-e3bd-4e0b-89c4-fe73193110fb-serving-cert\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm" Dec 05 15:01:00 crc kubenswrapper[4840]: E1205 15:01:00.100015 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:00.600007492 +0000 UTC m=+138.941070106 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.100047 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.100077 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a612c2a1-ac77-40f9-8cd9-18cf9f68b963-metrics-tls\") pod \"dns-operator-744455d44c-7t8bh\" (UID: \"a612c2a1-ac77-40f9-8cd9-18cf9f68b963\") " pod="openshift-dns-operator/dns-operator-744455d44c-7t8bh" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.100097 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/362d5a3b-2574-499c-8142-0f3e4369f573-config\") pod \"etcd-operator-b45778765-qskjn\" (UID: \"362d5a3b-2574-499c-8142-0f3e4369f573\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qskjn" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.100114 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngh6l\" (UniqueName: \"kubernetes.io/projected/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-kube-api-access-ngh6l\") pod \"console-f9d7485db-zb8r7\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " pod="openshift-console/console-f9d7485db-zb8r7" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.100132 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8fa1328-3653-44b4-803e-e9c41249bc4f-config\") pod \"kube-apiserver-operator-766d6c64bb-589pg\" (UID: \"d8fa1328-3653-44b4-803e-e9c41249bc4f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-589pg" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.100151 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/891a06ac-8101-4fab-a947-2adf9d8eeb7f-config\") pod \"machine-api-operator-5694c8668f-9lj8m\" (UID: \"891a06ac-8101-4fab-a947-2adf9d8eeb7f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9lj8m" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.100166 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqmpc\" (UniqueName: \"kubernetes.io/projected/a612c2a1-ac77-40f9-8cd9-18cf9f68b963-kube-api-access-jqmpc\") pod \"dns-operator-744455d44c-7t8bh\" (UID: \"a612c2a1-ac77-40f9-8cd9-18cf9f68b963\") " pod="openshift-dns-operator/dns-operator-744455d44c-7t8bh" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.100187 4840 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6-config\") pod \"route-controller-manager-6576b87f9c-tq2cv\" (UID: \"6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.100202 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/362d5a3b-2574-499c-8142-0f3e4369f573-etcd-service-ca\") pod \"etcd-operator-b45778765-qskjn\" (UID: \"362d5a3b-2574-499c-8142-0f3e4369f573\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qskjn" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.100224 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.100244 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d87a8800-e3bd-4e0b-89c4-fe73193110fb-etcd-serving-ca\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.100260 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4692b73d-a699-4ef5-82e8-cac30360e1b3-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-d4nr2\" (UID: \"4692b73d-a699-4ef5-82e8-cac30360e1b3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.100275 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrffj\" (UniqueName: \"kubernetes.io/projected/4692b73d-a699-4ef5-82e8-cac30360e1b3-kube-api-access-xrffj\") pod \"apiserver-7bbb656c7d-d4nr2\" (UID: \"4692b73d-a699-4ef5-82e8-cac30360e1b3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.100294 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/96c3620f-2835-41cc-8152-40ff0eb6db8c-service-ca-bundle\") pod \"authentication-operator-69f744f599-hvqs4\" (UID: \"96c3620f-2835-41cc-8152-40ff0eb6db8c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hvqs4" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.100310 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.100328 4840 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdr79\" (UniqueName: \"kubernetes.io/projected/362d5a3b-2574-499c-8142-0f3e4369f573-kube-api-access-fdr79\") pod \"etcd-operator-b45778765-qskjn\" (UID: \"362d5a3b-2574-499c-8142-0f3e4369f573\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qskjn" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.100342 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0421da7c-2216-4c4a-8422-2e3391a256b2-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-2fvvn\" (UID: \"0421da7c-2216-4c4a-8422-2e3391a256b2\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2fvvn" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.101183 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xwjm\" (UniqueName: \"kubernetes.io/projected/3d9897dc-10eb-4947-b5ce-63362338dfd1-kube-api-access-6xwjm\") pod \"machine-config-controller-84d6567774-fjcvg\" (UID: \"3d9897dc-10eb-4947-b5ce-63362338dfd1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fjcvg" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.101201 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqxzs\" (UniqueName: \"kubernetes.io/projected/7102c26c-508b-485c-8a0e-8d35333641ce-kube-api-access-fqxzs\") pod \"machine-config-server-zq9dg\" (UID: \"7102c26c-508b-485c-8a0e-8d35333641ce\") " pod="openshift-machine-config-operator/machine-config-server-zq9dg" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.101216 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6-client-ca\") pod \"route-controller-manager-6576b87f9c-tq2cv\" (UID: \"6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.101232 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.101248 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d6391e1f-a04c-44ae-9528-b80530aa1d4a-srv-cert\") pod \"catalog-operator-68c6474976-7vcg4\" (UID: \"d6391e1f-a04c-44ae-9528-b80530aa1d4a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7vcg4" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.101270 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4692b73d-a699-4ef5-82e8-cac30360e1b3-audit-policies\") pod \"apiserver-7bbb656c7d-d4nr2\" (UID: \"4692b73d-a699-4ef5-82e8-cac30360e1b3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2" Dec 05 15:01:00 crc 
kubenswrapper[4840]: I1205 15:01:00.101286 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxlfl\" (UniqueName: \"kubernetes.io/projected/811f6598-f603-4a15-8dec-add067d82d5c-kube-api-access-zxlfl\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.101309 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-console-serving-cert\") pod \"console-f9d7485db-zb8r7\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " pod="openshift-console/console-f9d7485db-zb8r7" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.101327 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-oauth-serving-cert\") pod \"console-f9d7485db-zb8r7\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " pod="openshift-console/console-f9d7485db-zb8r7" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.101353 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/87967d07-5c59-4d94-a9c4-1a9f1058e0a2-machine-approver-tls\") pod \"machine-approver-56656f9798-4bdzq\" (UID: \"87967d07-5c59-4d94-a9c4-1a9f1058e0a2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4bdzq" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.101407 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6whxk\" (UniqueName: \"kubernetes.io/projected/71224ef1-9751-49f0-89d6-18b5225f97cb-kube-api-access-6whxk\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.101426 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96c3620f-2835-41cc-8152-40ff0eb6db8c-serving-cert\") pod \"authentication-operator-69f744f599-hvqs4\" (UID: \"96c3620f-2835-41cc-8152-40ff0eb6db8c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hvqs4" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.101443 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d6391e1f-a04c-44ae-9528-b80530aa1d4a-profile-collector-cert\") pod \"catalog-operator-68c6474976-7vcg4\" (UID: \"d6391e1f-a04c-44ae-9528-b80530aa1d4a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7vcg4" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.101460 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9kmw\" (UniqueName: \"kubernetes.io/projected/0382f04b-cd24-421f-8bad-f147dc41bf91-kube-api-access-f9kmw\") pod \"openshift-apiserver-operator-796bbdcf4f-j5lpd\" (UID: \"0382f04b-cd24-421f-8bad-f147dc41bf91\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j5lpd" Dec 05 15:01:00 crc 
kubenswrapper[4840]: I1205 15:01:00.101477 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p86cs\" (UniqueName: \"kubernetes.io/projected/7acc3cd6-974f-4584-ac4d-d0e443052796-kube-api-access-p86cs\") pod \"kube-storage-version-migrator-operator-b67b599dd-htv8l\" (UID: \"7acc3cd6-974f-4584-ac4d-d0e443052796\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-htv8l" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.101495 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/7102c26c-508b-485c-8a0e-8d35333641ce-node-bootstrap-token\") pod \"machine-config-server-zq9dg\" (UID: \"7102c26c-508b-485c-8a0e-8d35333641ce\") " pod="openshift-machine-config-operator/machine-config-server-zq9dg" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.101515 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a5f30d9-fc2d-47dc-8662-d649023f9521-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nzmkn\" (UID: \"3a5f30d9-fc2d-47dc-8662-d649023f9521\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nzmkn" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.101636 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n52nj\" (UniqueName: \"kubernetes.io/projected/96c3620f-2835-41cc-8152-40ff0eb6db8c-kube-api-access-n52nj\") pod \"authentication-operator-69f744f599-hvqs4\" (UID: \"96c3620f-2835-41cc-8152-40ff0eb6db8c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hvqs4" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.101722 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/8a02be23-a9cd-4ea6-9b6f-9e383856cb1d-signing-key\") pod \"service-ca-9c57cc56f-r9h7q\" (UID: \"8a02be23-a9cd-4ea6-9b6f-9e383856cb1d\") " pod="openshift-service-ca/service-ca-9c57cc56f-r9h7q" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.101840 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4692b73d-a699-4ef5-82e8-cac30360e1b3-serving-cert\") pod \"apiserver-7bbb656c7d-d4nr2\" (UID: \"4692b73d-a699-4ef5-82e8-cac30360e1b3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.101880 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h24l8\" (UniqueName: \"kubernetes.io/projected/da0816a7-7a6a-40ac-a63a-3c26278426d8-kube-api-access-h24l8\") pod \"openshift-controller-manager-operator-756b6f6bc6-zsj6v\" (UID: \"da0816a7-7a6a-40ac-a63a-3c26278426d8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zsj6v" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.101905 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: 
\"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.101931 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d87a8800-e3bd-4e0b-89c4-fe73193110fb-encryption-config\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.101966 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87967d07-5c59-4d94-a9c4-1a9f1058e0a2-config\") pod \"machine-approver-56656f9798-4bdzq\" (UID: \"87967d07-5c59-4d94-a9c4-1a9f1058e0a2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4bdzq" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.101979 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0382f04b-cd24-421f-8bad-f147dc41bf91-config\") pod \"openshift-apiserver-operator-796bbdcf4f-j5lpd\" (UID: \"0382f04b-cd24-421f-8bad-f147dc41bf91\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j5lpd" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.101999 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/71224ef1-9751-49f0-89d6-18b5225f97cb-registry-tls\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.102014 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/71224ef1-9751-49f0-89d6-18b5225f97cb-registry-certificates\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.102029 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zb4mz\" (UniqueName: \"kubernetes.io/projected/891a06ac-8101-4fab-a947-2adf9d8eeb7f-kube-api-access-zb4mz\") pod \"machine-api-operator-5694c8668f-9lj8m\" (UID: \"891a06ac-8101-4fab-a947-2adf9d8eeb7f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9lj8m" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.102044 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-trusted-ca-bundle\") pod \"console-f9d7485db-zb8r7\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " pod="openshift-console/console-f9d7485db-zb8r7" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.102089 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvlpd\" (UniqueName: \"kubernetes.io/projected/ffde146e-eb04-4056-acb2-febc2da78e46-kube-api-access-kvlpd\") pod \"downloads-7954f5f757-rbbsb\" (UID: \"ffde146e-eb04-4056-acb2-febc2da78e46\") " pod="openshift-console/downloads-7954f5f757-rbbsb" Dec 05 15:01:00 crc 
kubenswrapper[4840]: I1205 15:01:00.102104 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d87a8800-e3bd-4e0b-89c4-fe73193110fb-trusted-ca-bundle\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.102121 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.102135 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0382f04b-cd24-421f-8bad-f147dc41bf91-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-j5lpd\" (UID: \"0382f04b-cd24-421f-8bad-f147dc41bf91\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j5lpd" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.102150 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/d87a8800-e3bd-4e0b-89c4-fe73193110fb-image-import-ca\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.102165 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.102180 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7acc3cd6-974f-4584-ac4d-d0e443052796-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-htv8l\" (UID: \"7acc3cd6-974f-4584-ac4d-d0e443052796\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-htv8l" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.102196 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da0816a7-7a6a-40ac-a63a-3c26278426d8-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-zsj6v\" (UID: \"da0816a7-7a6a-40ac-a63a-3c26278426d8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zsj6v" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.102253 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sn5g5\" (UniqueName: \"kubernetes.io/projected/d87a8800-e3bd-4e0b-89c4-fe73193110fb-kube-api-access-sn5g5\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " 
pod="openshift-apiserver/apiserver-76f77b778f-g59rm" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.102269 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d8fa1328-3653-44b4-803e-e9c41249bc4f-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-589pg\" (UID: \"d8fa1328-3653-44b4-803e-e9c41249bc4f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-589pg" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.102477 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/362d5a3b-2574-499c-8142-0f3e4369f573-etcd-client\") pod \"etcd-operator-b45778765-qskjn\" (UID: \"362d5a3b-2574-499c-8142-0f3e4369f573\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qskjn" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.102492 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/71224ef1-9751-49f0-89d6-18b5225f97cb-bound-sa-token\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.102507 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/362d5a3b-2574-499c-8142-0f3e4369f573-serving-cert\") pod \"etcd-operator-b45778765-qskjn\" (UID: \"362d5a3b-2574-499c-8142-0f3e4369f573\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qskjn" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.102521 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0421da7c-2216-4c4a-8422-2e3391a256b2-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-2fvvn\" (UID: \"0421da7c-2216-4c4a-8422-2e3391a256b2\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2fvvn" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.102542 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-console-oauth-config\") pod \"console-f9d7485db-zb8r7\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " pod="openshift-console/console-f9d7485db-zb8r7" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.102559 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7f5s\" (UniqueName: \"kubernetes.io/projected/8a02be23-a9cd-4ea6-9b6f-9e383856cb1d-kube-api-access-q7f5s\") pod \"service-ca-9c57cc56f-r9h7q\" (UID: \"8a02be23-a9cd-4ea6-9b6f-9e383856cb1d\") " pod="openshift-service-ca/service-ca-9c57cc56f-r9h7q" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.103224 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" Dec 
05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.103272 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0421da7c-2216-4c4a-8422-2e3391a256b2-config\") pod \"kube-controller-manager-operator-78b949d7b-2fvvn\" (UID: \"0421da7c-2216-4c4a-8422-2e3391a256b2\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2fvvn" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.103509 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f9f44648-d5f1-49ee-a394-115e43c97fc9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nc87t\" (UID: \"f9f44648-d5f1-49ee-a394-115e43c97fc9\") " pod="openshift-marketplace/marketplace-operator-79b997595-nc87t" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.103536 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-console-config\") pod \"console-f9d7485db-zb8r7\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " pod="openshift-console/console-f9d7485db-zb8r7" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.103563 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/237e4a75-4edd-4622-87f4-03a1f620649d-serving-cert\") pod \"openshift-config-operator-7777fb866f-pczb2\" (UID: \"237e4a75-4edd-4622-87f4-03a1f620649d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pczb2" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.103600 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4692b73d-a699-4ef5-82e8-cac30360e1b3-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-d4nr2\" (UID: \"4692b73d-a699-4ef5-82e8-cac30360e1b3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.103674 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-service-ca\") pod \"console-f9d7485db-zb8r7\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " pod="openshift-console/console-f9d7485db-zb8r7" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.110312 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/71224ef1-9751-49f0-89d6-18b5225f97cb-registry-tls\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.204497 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:00 crc kubenswrapper[4840]: E1205 15:01:00.204723 4840 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:00.704690023 +0000 UTC m=+139.045752637 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.205923 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/da0816a7-7a6a-40ac-a63a-3c26278426d8-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-zsj6v\" (UID: \"da0816a7-7a6a-40ac-a63a-3c26278426d8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zsj6v" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.206101 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f9f44648-d5f1-49ee-a394-115e43c97fc9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nc87t\" (UID: \"f9f44648-d5f1-49ee-a394-115e43c97fc9\") " pod="openshift-marketplace/marketplace-operator-79b997595-nc87t" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.206232 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9k58\" (UniqueName: \"kubernetes.io/projected/c1cd36b3-0ae5-4d77-a972-4521a0bed069-kube-api-access-s9k58\") pod \"package-server-manager-789f6589d5-8vzfs\" (UID: \"c1cd36b3-0ae5-4d77-a972-4521a0bed069\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8vzfs" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.206387 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/e68d4dc9-2488-4666-897a-f2ce27934d81-socket-dir\") pod \"csi-hostpathplugin-fnhqw\" (UID: \"e68d4dc9-2488-4666-897a-f2ce27934d81\") " pod="hostpath-provisioner/csi-hostpathplugin-fnhqw" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.206513 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e281c913-e265-4ce8-af6a-11f255f6faf1-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-x52gm\" (UID: \"e281c913-e265-4ce8-af6a-11f255f6faf1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x52gm" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.206614 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/891a06ac-8101-4fab-a947-2adf9d8eeb7f-images\") pod \"machine-api-operator-5694c8668f-9lj8m\" (UID: \"891a06ac-8101-4fab-a947-2adf9d8eeb7f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9lj8m" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.206836 4840 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-xsc77\" (UniqueName: \"kubernetes.io/projected/87967d07-5c59-4d94-a9c4-1a9f1058e0a2-kube-api-access-xsc77\") pod \"machine-approver-56656f9798-4bdzq\" (UID: \"87967d07-5c59-4d94-a9c4-1a9f1058e0a2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4bdzq" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.206992 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7jnz\" (UniqueName: \"kubernetes.io/projected/d6391e1f-a04c-44ae-9528-b80530aa1d4a-kube-api-access-w7jnz\") pod \"catalog-operator-68c6474976-7vcg4\" (UID: \"d6391e1f-a04c-44ae-9528-b80530aa1d4a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7vcg4" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.207102 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/362d5a3b-2574-499c-8142-0f3e4369f573-etcd-ca\") pod \"etcd-operator-b45778765-qskjn\" (UID: \"362d5a3b-2574-499c-8142-0f3e4369f573\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qskjn" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.207245 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/d87a8800-e3bd-4e0b-89c4-fe73193110fb-audit\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.207358 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.207469 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d87a8800-e3bd-4e0b-89c4-fe73193110fb-audit-dir\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.207582 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3d9897dc-10eb-4947-b5ce-63362338dfd1-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-fjcvg\" (UID: \"3d9897dc-10eb-4947-b5ce-63362338dfd1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fjcvg" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.207696 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwxj4\" (UniqueName: \"kubernetes.io/projected/e281c913-e265-4ce8-af6a-11f255f6faf1-kube-api-access-bwxj4\") pod \"control-plane-machine-set-operator-78cbb6b69f-x52gm\" (UID: \"e281c913-e265-4ce8-af6a-11f255f6faf1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x52gm" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.207810 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: 
\"kubernetes.io/empty-dir/71224ef1-9751-49f0-89d6-18b5225f97cb-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.207954 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4692b73d-a699-4ef5-82e8-cac30360e1b3-audit-dir\") pod \"apiserver-7bbb656c7d-d4nr2\" (UID: \"4692b73d-a699-4ef5-82e8-cac30360e1b3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.208088 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6-serving-cert\") pod \"route-controller-manager-6576b87f9c-tq2cv\" (UID: \"6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.208197 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/fdf64638-6ce1-424a-af26-a49e98a29582-metrics-tls\") pod \"ingress-operator-5b745b69d9-jh844\" (UID: \"fdf64638-6ce1-424a-af26-a49e98a29582\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jh844" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.208310 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fdf64638-6ce1-424a-af26-a49e98a29582-trusted-ca\") pod \"ingress-operator-5b745b69d9-jh844\" (UID: \"fdf64638-6ce1-424a-af26-a49e98a29582\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jh844" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.208431 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1d0ca542-fc9c-4849-ba80-c7fd15606fb5-auth-proxy-config\") pod \"machine-config-operator-74547568cd-69w55\" (UID: \"1d0ca542-fc9c-4849-ba80-c7fd15606fb5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69w55" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.208547 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4692b73d-a699-4ef5-82e8-cac30360e1b3-audit-dir\") pod \"apiserver-7bbb656c7d-d4nr2\" (UID: \"4692b73d-a699-4ef5-82e8-cac30360e1b3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.208599 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/71224ef1-9751-49f0-89d6-18b5225f97cb-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.208551 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/237e4a75-4edd-4622-87f4-03a1f620649d-available-featuregates\") pod \"openshift-config-operator-7777fb866f-pczb2\" (UID: \"237e4a75-4edd-4622-87f4-03a1f620649d\") " 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-pczb2" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.208251 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/362d5a3b-2574-499c-8142-0f3e4369f573-etcd-ca\") pod \"etcd-operator-b45778765-qskjn\" (UID: \"362d5a3b-2574-499c-8142-0f3e4369f573\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qskjn" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.208656 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96c3620f-2835-41cc-8152-40ff0eb6db8c-config\") pod \"authentication-operator-69f744f599-hvqs4\" (UID: \"96c3620f-2835-41cc-8152-40ff0eb6db8c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hvqs4" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.207979 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d87a8800-e3bd-4e0b-89c4-fe73193110fb-audit-dir\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.208200 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/d87a8800-e3bd-4e0b-89c4-fe73193110fb-audit\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.208725 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cbd5e8f-ec51-4c95-a2e0-43b06373d323-config\") pod \"service-ca-operator-777779d784-pw7gj\" (UID: \"8cbd5e8f-ec51-4c95-a2e0-43b06373d323\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-pw7gj" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.208895 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qc27f\" (UniqueName: \"kubernetes.io/projected/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6-kube-api-access-qc27f\") pod \"route-controller-manager-6576b87f9c-tq2cv\" (UID: \"6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.208945 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7acc3cd6-974f-4584-ac4d-d0e443052796-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-htv8l\" (UID: \"7acc3cd6-974f-4584-ac4d-d0e443052796\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-htv8l" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.208983 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/284f860e-20fc-48b2-91f2-a3e6fcb6b2c7-tmpfs\") pod \"packageserver-d55dfcdfc-tn7k5\" (UID: \"284f860e-20fc-48b2-91f2-a3e6fcb6b2c7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tn7k5" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.209012 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-l75sx\" (UniqueName: \"kubernetes.io/projected/478697e8-c090-46b8-adc9-88d2592e75a8-kube-api-access-l75sx\") pod \"multus-admission-controller-857f4d67dd-7z87w\" (UID: \"478697e8-c090-46b8-adc9-88d2592e75a8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7z87w" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.209040 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/e68d4dc9-2488-4666-897a-f2ce27934d81-plugins-dir\") pod \"csi-hostpathplugin-fnhqw\" (UID: \"e68d4dc9-2488-4666-897a-f2ce27934d81\") " pod="hostpath-provisioner/csi-hostpathplugin-fnhqw" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.209078 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cn9fh\" (UniqueName: \"kubernetes.io/projected/237e4a75-4edd-4622-87f4-03a1f620649d-kube-api-access-cn9fh\") pod \"openshift-config-operator-7777fb866f-pczb2\" (UID: \"237e4a75-4edd-4622-87f4-03a1f620649d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pczb2" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.209107 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/7102c26c-508b-485c-8a0e-8d35333641ce-certs\") pod \"machine-config-server-zq9dg\" (UID: \"7102c26c-508b-485c-8a0e-8d35333641ce\") " pod="openshift-machine-config-operator/machine-config-server-zq9dg" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.209154 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lr8jp\" (UniqueName: \"kubernetes.io/projected/f9f44648-d5f1-49ee-a394-115e43c97fc9-kube-api-access-lr8jp\") pod \"marketplace-operator-79b997595-nc87t\" (UID: \"f9f44648-d5f1-49ee-a394-115e43c97fc9\") " pod="openshift-marketplace/marketplace-operator-79b997595-nc87t" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.209182 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a905c75c-cff5-4137-b8ee-212bfc0334ff-trusted-ca\") pod \"console-operator-58897d9998-mp7c5\" (UID: \"a905c75c-cff5-4137-b8ee-212bfc0334ff\") " pod="openshift-console-operator/console-operator-58897d9998-mp7c5" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.209206 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-658zl\" (UniqueName: \"kubernetes.io/projected/c25af517-aa72-4f6b-9271-e936daa641d7-kube-api-access-658zl\") pod \"dns-default-5t5n2\" (UID: \"c25af517-aa72-4f6b-9271-e936daa641d7\") " pod="openshift-dns/dns-default-5t5n2" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.209242 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8fa1328-3653-44b4-803e-e9c41249bc4f-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-589pg\" (UID: \"d8fa1328-3653-44b4-803e-e9c41249bc4f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-589pg" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.209281 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/811f6598-f603-4a15-8dec-add067d82d5c-audit-policies\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: 
\"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.209316 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3d9897dc-10eb-4947-b5ce-63362338dfd1-proxy-tls\") pod \"machine-config-controller-84d6567774-fjcvg\" (UID: \"3d9897dc-10eb-4947-b5ce-63362338dfd1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fjcvg" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.209340 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f1e52a75-bace-452e-989d-3907666cdd11-srv-cert\") pod \"olm-operator-6b444d44fb-7bdp5\" (UID: \"f1e52a75-bace-452e-989d-3907666cdd11\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7bdp5" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.209366 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/71224ef1-9751-49f0-89d6-18b5225f97cb-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.209388 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2jxq\" (UniqueName: \"kubernetes.io/projected/a905c75c-cff5-4137-b8ee-212bfc0334ff-kube-api-access-v2jxq\") pod \"console-operator-58897d9998-mp7c5\" (UID: \"a905c75c-cff5-4137-b8ee-212bfc0334ff\") " pod="openshift-console-operator/console-operator-58897d9998-mp7c5" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.209415 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hw5zt\" (UniqueName: \"kubernetes.io/projected/cf41adb4-ca77-4997-a2db-0e45bbe317c3-kube-api-access-hw5zt\") pod \"router-default-5444994796-qrgwq\" (UID: \"cf41adb4-ca77-4997-a2db-0e45bbe317c3\") " pod="openshift-ingress/router-default-5444994796-qrgwq" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.209448 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d87a8800-e3bd-4e0b-89c4-fe73193110fb-config\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.209477 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.209523 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a5f30d9-fc2d-47dc-8662-d649023f9521-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nzmkn\" (UID: \"3a5f30d9-fc2d-47dc-8662-d649023f9521\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nzmkn" Dec 05 15:01:00 crc 
kubenswrapper[4840]: I1205 15:01:00.209581 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96c3620f-2835-41cc-8152-40ff0eb6db8c-config\") pod \"authentication-operator-69f744f599-hvqs4\" (UID: \"96c3620f-2835-41cc-8152-40ff0eb6db8c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hvqs4" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.208427 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/891a06ac-8101-4fab-a947-2adf9d8eeb7f-images\") pod \"machine-api-operator-5694c8668f-9lj8m\" (UID: \"891a06ac-8101-4fab-a947-2adf9d8eeb7f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9lj8m" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.210021 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/da0816a7-7a6a-40ac-a63a-3c26278426d8-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-zsj6v\" (UID: \"da0816a7-7a6a-40ac-a63a-3c26278426d8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zsj6v" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.210069 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f9f44648-d5f1-49ee-a394-115e43c97fc9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nc87t\" (UID: \"f9f44648-d5f1-49ee-a394-115e43c97fc9\") " pod="openshift-marketplace/marketplace-operator-79b997595-nc87t" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.210120 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3d9897dc-10eb-4947-b5ce-63362338dfd1-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-fjcvg\" (UID: \"3d9897dc-10eb-4947-b5ce-63362338dfd1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fjcvg" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.210137 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/891a06ac-8101-4fab-a947-2adf9d8eeb7f-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9lj8m\" (UID: \"891a06ac-8101-4fab-a947-2adf9d8eeb7f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9lj8m" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.210203 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a5f30d9-fc2d-47dc-8662-d649023f9521-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nzmkn\" (UID: \"3a5f30d9-fc2d-47dc-8662-d649023f9521\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nzmkn" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.210599 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d87a8800-e3bd-4e0b-89c4-fe73193110fb-node-pullsecrets\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.210884 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" 
(UniqueName: \"kubernetes.io/configmap/811f6598-f603-4a15-8dec-add067d82d5c-audit-policies\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.210958 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d87a8800-e3bd-4e0b-89c4-fe73193110fb-node-pullsecrets\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211025 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/478697e8-c090-46b8-adc9-88d2592e75a8-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-7z87w\" (UID: \"478697e8-c090-46b8-adc9-88d2592e75a8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7z87w" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211058 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/cf41adb4-ca77-4997-a2db-0e45bbe317c3-stats-auth\") pod \"router-default-5444994796-qrgwq\" (UID: \"cf41adb4-ca77-4997-a2db-0e45bbe317c3\") " pod="openshift-ingress/router-default-5444994796-qrgwq" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211077 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0ccd57b5-93f1-4bf8-adf6-4c0b2c400e03-cert\") pod \"ingress-canary-48665\" (UID: \"0ccd57b5-93f1-4bf8-adf6-4c0b2c400e03\") " pod="openshift-ingress-canary/ingress-canary-48665" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211102 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/71224ef1-9751-49f0-89d6-18b5225f97cb-trusted-ca\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211121 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211140 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3a5f30d9-fc2d-47dc-8662-d649023f9521-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nzmkn\" (UID: \"3a5f30d9-fc2d-47dc-8662-d649023f9521\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nzmkn" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211191 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1d0ca542-fc9c-4849-ba80-c7fd15606fb5-images\") pod \"machine-config-operator-74547568cd-69w55\" (UID: \"1d0ca542-fc9c-4849-ba80-c7fd15606fb5\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69w55" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211223 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d87a8800-e3bd-4e0b-89c4-fe73193110fb-config\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211233 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4692b73d-a699-4ef5-82e8-cac30360e1b3-etcd-client\") pod \"apiserver-7bbb656c7d-d4nr2\" (UID: \"4692b73d-a699-4ef5-82e8-cac30360e1b3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211260 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/70be4dce-113d-4659-b5dc-55c3c724de12-secret-volume\") pod \"collect-profiles-29415780-k44mq\" (UID: \"70be4dce-113d-4659-b5dc-55c3c724de12\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415780-k44mq" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211290 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211359 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99lnh\" (UniqueName: \"kubernetes.io/projected/f1e52a75-bace-452e-989d-3907666cdd11-kube-api-access-99lnh\") pod \"olm-operator-6b444d44fb-7bdp5\" (UID: \"f1e52a75-bace-452e-989d-3907666cdd11\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7bdp5" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211492 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/e68d4dc9-2488-4666-897a-f2ce27934d81-csi-data-dir\") pod \"csi-hostpathplugin-fnhqw\" (UID: \"e68d4dc9-2488-4666-897a-f2ce27934d81\") " pod="hostpath-provisioner/csi-hostpathplugin-fnhqw" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211518 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d87a8800-e3bd-4e0b-89c4-fe73193110fb-serving-cert\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211537 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211566 4840 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-ngh6l\" (UniqueName: \"kubernetes.io/projected/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-kube-api-access-ngh6l\") pod \"console-f9d7485db-zb8r7\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " pod="openshift-console/console-f9d7485db-zb8r7" Dec 05 15:01:00 crc kubenswrapper[4840]: E1205 15:01:00.211594 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:00.711581438 +0000 UTC m=+139.052644052 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211629 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a612c2a1-ac77-40f9-8cd9-18cf9f68b963-metrics-tls\") pod \"dns-operator-744455d44c-7t8bh\" (UID: \"a612c2a1-ac77-40f9-8cd9-18cf9f68b963\") " pod="openshift-dns-operator/dns-operator-744455d44c-7t8bh" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211651 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/362d5a3b-2574-499c-8142-0f3e4369f573-config\") pod \"etcd-operator-b45778765-qskjn\" (UID: \"362d5a3b-2574-499c-8142-0f3e4369f573\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qskjn" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211672 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gskd\" (UniqueName: \"kubernetes.io/projected/8cbd5e8f-ec51-4c95-a2e0-43b06373d323-kube-api-access-2gskd\") pod \"service-ca-operator-777779d784-pw7gj\" (UID: \"8cbd5e8f-ec51-4c95-a2e0-43b06373d323\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-pw7gj" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211691 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8fa1328-3653-44b4-803e-e9c41249bc4f-config\") pod \"kube-apiserver-operator-766d6c64bb-589pg\" (UID: \"d8fa1328-3653-44b4-803e-e9c41249bc4f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-589pg" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211709 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1d0ca542-fc9c-4849-ba80-c7fd15606fb5-proxy-tls\") pod \"machine-config-operator-74547568cd-69w55\" (UID: \"1d0ca542-fc9c-4849-ba80-c7fd15606fb5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69w55" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211731 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/c1cd36b3-0ae5-4d77-a972-4521a0bed069-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-8vzfs\" 
(UID: \"c1cd36b3-0ae5-4d77-a972-4521a0bed069\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8vzfs" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211794 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/891a06ac-8101-4fab-a947-2adf9d8eeb7f-config\") pod \"machine-api-operator-5694c8668f-9lj8m\" (UID: \"891a06ac-8101-4fab-a947-2adf9d8eeb7f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9lj8m" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211815 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6-config\") pod \"route-controller-manager-6576b87f9c-tq2cv\" (UID: \"6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211833 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqmpc\" (UniqueName: \"kubernetes.io/projected/a612c2a1-ac77-40f9-8cd9-18cf9f68b963-kube-api-access-jqmpc\") pod \"dns-operator-744455d44c-7t8bh\" (UID: \"a612c2a1-ac77-40f9-8cd9-18cf9f68b963\") " pod="openshift-dns-operator/dns-operator-744455d44c-7t8bh" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211854 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/362d5a3b-2574-499c-8142-0f3e4369f573-etcd-service-ca\") pod \"etcd-operator-b45778765-qskjn\" (UID: \"362d5a3b-2574-499c-8142-0f3e4369f573\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qskjn" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211886 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211905 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4692b73d-a699-4ef5-82e8-cac30360e1b3-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-d4nr2\" (UID: \"4692b73d-a699-4ef5-82e8-cac30360e1b3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211922 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrffj\" (UniqueName: \"kubernetes.io/projected/4692b73d-a699-4ef5-82e8-cac30360e1b3-kube-api-access-xrffj\") pod \"apiserver-7bbb656c7d-d4nr2\" (UID: \"4692b73d-a699-4ef5-82e8-cac30360e1b3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211939 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d87a8800-e3bd-4e0b-89c4-fe73193110fb-etcd-serving-ca\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211957 4840 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdr79\" (UniqueName: \"kubernetes.io/projected/362d5a3b-2574-499c-8142-0f3e4369f573-kube-api-access-fdr79\") pod \"etcd-operator-b45778765-qskjn\" (UID: \"362d5a3b-2574-499c-8142-0f3e4369f573\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qskjn" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.211977 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/96c3620f-2835-41cc-8152-40ff0eb6db8c-service-ca-bundle\") pod \"authentication-operator-69f744f599-hvqs4\" (UID: \"96c3620f-2835-41cc-8152-40ff0eb6db8c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hvqs4" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212113 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212135 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ts4s2\" (UniqueName: \"kubernetes.io/projected/1d0ca542-fc9c-4849-ba80-c7fd15606fb5-kube-api-access-ts4s2\") pod \"machine-config-operator-74547568cd-69w55\" (UID: \"1d0ca542-fc9c-4849-ba80-c7fd15606fb5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69w55" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212163 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0421da7c-2216-4c4a-8422-2e3391a256b2-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-2fvvn\" (UID: \"0421da7c-2216-4c4a-8422-2e3391a256b2\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2fvvn" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212188 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f1e52a75-bace-452e-989d-3907666cdd11-profile-collector-cert\") pod \"olm-operator-6b444d44fb-7bdp5\" (UID: \"f1e52a75-bace-452e-989d-3907666cdd11\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7bdp5" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212217 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xwjm\" (UniqueName: \"kubernetes.io/projected/3d9897dc-10eb-4947-b5ce-63362338dfd1-kube-api-access-6xwjm\") pod \"machine-config-controller-84d6567774-fjcvg\" (UID: \"3d9897dc-10eb-4947-b5ce-63362338dfd1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fjcvg" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212247 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqxzs\" (UniqueName: \"kubernetes.io/projected/7102c26c-508b-485c-8a0e-8d35333641ce-kube-api-access-fqxzs\") pod \"machine-config-server-zq9dg\" (UID: \"7102c26c-508b-485c-8a0e-8d35333641ce\") " pod="openshift-machine-config-operator/machine-config-server-zq9dg" Dec 05 15:01:00 crc kubenswrapper[4840]: 
I1205 15:01:00.212277 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c25af517-aa72-4f6b-9271-e936daa641d7-config-volume\") pod \"dns-default-5t5n2\" (UID: \"c25af517-aa72-4f6b-9271-e936daa641d7\") " pod="openshift-dns/dns-default-5t5n2" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212298 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6xgx\" (UniqueName: \"kubernetes.io/projected/70be4dce-113d-4659-b5dc-55c3c724de12-kube-api-access-w6xgx\") pod \"collect-profiles-29415780-k44mq\" (UID: \"70be4dce-113d-4659-b5dc-55c3c724de12\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415780-k44mq" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212315 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpmkb\" (UniqueName: \"kubernetes.io/projected/0ccd57b5-93f1-4bf8-adf6-4c0b2c400e03-kube-api-access-qpmkb\") pod \"ingress-canary-48665\" (UID: \"0ccd57b5-93f1-4bf8-adf6-4c0b2c400e03\") " pod="openshift-ingress-canary/ingress-canary-48665" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212337 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6-client-ca\") pod \"route-controller-manager-6576b87f9c-tq2cv\" (UID: \"6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212356 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212376 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d6391e1f-a04c-44ae-9528-b80530aa1d4a-srv-cert\") pod \"catalog-operator-68c6474976-7vcg4\" (UID: \"d6391e1f-a04c-44ae-9528-b80530aa1d4a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7vcg4" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212397 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4692b73d-a699-4ef5-82e8-cac30360e1b3-audit-policies\") pod \"apiserver-7bbb656c7d-d4nr2\" (UID: \"4692b73d-a699-4ef5-82e8-cac30360e1b3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212420 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxlfl\" (UniqueName: \"kubernetes.io/projected/811f6598-f603-4a15-8dec-add067d82d5c-kube-api-access-zxlfl\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212551 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/a905c75c-cff5-4137-b8ee-212bfc0334ff-serving-cert\") pod \"console-operator-58897d9998-mp7c5\" (UID: \"a905c75c-cff5-4137-b8ee-212bfc0334ff\") " pod="openshift-console-operator/console-operator-58897d9998-mp7c5" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212582 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/e68d4dc9-2488-4666-897a-f2ce27934d81-registration-dir\") pod \"csi-hostpathplugin-fnhqw\" (UID: \"e68d4dc9-2488-4666-897a-f2ce27934d81\") " pod="hostpath-provisioner/csi-hostpathplugin-fnhqw" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212626 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-console-serving-cert\") pod \"console-f9d7485db-zb8r7\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " pod="openshift-console/console-f9d7485db-zb8r7" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212647 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-oauth-serving-cert\") pod \"console-f9d7485db-zb8r7\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " pod="openshift-console/console-f9d7485db-zb8r7" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212651 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/362d5a3b-2574-499c-8142-0f3e4369f573-config\") pod \"etcd-operator-b45778765-qskjn\" (UID: \"362d5a3b-2574-499c-8142-0f3e4369f573\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qskjn" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212668 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cf41adb4-ca77-4997-a2db-0e45bbe317c3-metrics-certs\") pod \"router-default-5444994796-qrgwq\" (UID: \"cf41adb4-ca77-4997-a2db-0e45bbe317c3\") " pod="openshift-ingress/router-default-5444994796-qrgwq" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212690 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/e68d4dc9-2488-4666-897a-f2ce27934d81-mountpoint-dir\") pod \"csi-hostpathplugin-fnhqw\" (UID: \"e68d4dc9-2488-4666-897a-f2ce27934d81\") " pod="hostpath-provisioner/csi-hostpathplugin-fnhqw" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212709 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fdf64638-6ce1-424a-af26-a49e98a29582-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jh844\" (UID: \"fdf64638-6ce1-424a-af26-a49e98a29582\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jh844" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212737 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/87967d07-5c59-4d94-a9c4-1a9f1058e0a2-machine-approver-tls\") pod \"machine-approver-56656f9798-4bdzq\" (UID: \"87967d07-5c59-4d94-a9c4-1a9f1058e0a2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4bdzq" Dec 05 15:01:00 crc 
kubenswrapper[4840]: I1205 15:01:00.212764 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/70be4dce-113d-4659-b5dc-55c3c724de12-config-volume\") pod \"collect-profiles-29415780-k44mq\" (UID: \"70be4dce-113d-4659-b5dc-55c3c724de12\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415780-k44mq" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212783 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6whxk\" (UniqueName: \"kubernetes.io/projected/71224ef1-9751-49f0-89d6-18b5225f97cb-kube-api-access-6whxk\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212803 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96c3620f-2835-41cc-8152-40ff0eb6db8c-serving-cert\") pod \"authentication-operator-69f744f599-hvqs4\" (UID: \"96c3620f-2835-41cc-8152-40ff0eb6db8c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hvqs4" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212819 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d6391e1f-a04c-44ae-9528-b80530aa1d4a-profile-collector-cert\") pod \"catalog-operator-68c6474976-7vcg4\" (UID: \"d6391e1f-a04c-44ae-9528-b80530aa1d4a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7vcg4" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212837 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7r2gb\" (UniqueName: \"kubernetes.io/projected/284f860e-20fc-48b2-91f2-a3e6fcb6b2c7-kube-api-access-7r2gb\") pod \"packageserver-d55dfcdfc-tn7k5\" (UID: \"284f860e-20fc-48b2-91f2-a3e6fcb6b2c7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tn7k5" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212859 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9kmw\" (UniqueName: \"kubernetes.io/projected/0382f04b-cd24-421f-8bad-f147dc41bf91-kube-api-access-f9kmw\") pod \"openshift-apiserver-operator-796bbdcf4f-j5lpd\" (UID: \"0382f04b-cd24-421f-8bad-f147dc41bf91\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j5lpd" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212895 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p86cs\" (UniqueName: \"kubernetes.io/projected/7acc3cd6-974f-4584-ac4d-d0e443052796-kube-api-access-p86cs\") pod \"kube-storage-version-migrator-operator-b67b599dd-htv8l\" (UID: \"7acc3cd6-974f-4584-ac4d-d0e443052796\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-htv8l" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212912 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/7102c26c-508b-485c-8a0e-8d35333641ce-node-bootstrap-token\") pod \"machine-config-server-zq9dg\" (UID: \"7102c26c-508b-485c-8a0e-8d35333641ce\") " pod="openshift-machine-config-operator/machine-config-server-zq9dg" Dec 05 15:01:00 crc 
kubenswrapper[4840]: I1205 15:01:00.212929 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a5f30d9-fc2d-47dc-8662-d649023f9521-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nzmkn\" (UID: \"3a5f30d9-fc2d-47dc-8662-d649023f9521\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nzmkn" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212948 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cf41adb4-ca77-4997-a2db-0e45bbe317c3-service-ca-bundle\") pod \"router-default-5444994796-qrgwq\" (UID: \"cf41adb4-ca77-4997-a2db-0e45bbe317c3\") " pod="openshift-ingress/router-default-5444994796-qrgwq" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.212968 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n52nj\" (UniqueName: \"kubernetes.io/projected/96c3620f-2835-41cc-8152-40ff0eb6db8c-kube-api-access-n52nj\") pod \"authentication-operator-69f744f599-hvqs4\" (UID: \"96c3620f-2835-41cc-8152-40ff0eb6db8c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hvqs4" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.213138 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8fa1328-3653-44b4-803e-e9c41249bc4f-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-589pg\" (UID: \"d8fa1328-3653-44b4-803e-e9c41249bc4f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-589pg" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.213296 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.213531 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/891a06ac-8101-4fab-a947-2adf9d8eeb7f-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-9lj8m\" (UID: \"891a06ac-8101-4fab-a947-2adf9d8eeb7f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9lj8m" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.214189 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3d9897dc-10eb-4947-b5ce-63362338dfd1-proxy-tls\") pod \"machine-config-controller-84d6567774-fjcvg\" (UID: \"3d9897dc-10eb-4947-b5ce-63362338dfd1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fjcvg" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.214481 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/71224ef1-9751-49f0-89d6-18b5225f97cb-trusted-ca\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.214576 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6-serving-cert\") pod \"route-controller-manager-6576b87f9c-tq2cv\" (UID: \"6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.214911 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/237e4a75-4edd-4622-87f4-03a1f620649d-available-featuregates\") pod \"openshift-config-operator-7777fb866f-pczb2\" (UID: \"237e4a75-4edd-4622-87f4-03a1f620649d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pczb2"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.215587 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.216672 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.217174 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d87a8800-e3bd-4e0b-89c4-fe73193110fb-etcd-serving-ca\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.217654 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4692b73d-a699-4ef5-82e8-cac30360e1b3-etcd-client\") pod \"apiserver-7bbb656c7d-d4nr2\" (UID: \"4692b73d-a699-4ef5-82e8-cac30360e1b3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.217750 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6-client-ca\") pod \"route-controller-manager-6576b87f9c-tq2cv\" (UID: \"6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.217949 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.218673 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7acc3cd6-974f-4584-ac4d-d0e443052796-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-htv8l\" (UID: \"7acc3cd6-974f-4584-ac4d-d0e443052796\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-htv8l"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.219096 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6-config\") pod \"route-controller-manager-6576b87f9c-tq2cv\" (UID: \"6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.219132 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4692b73d-a699-4ef5-82e8-cac30360e1b3-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-d4nr2\" (UID: \"4692b73d-a699-4ef5-82e8-cac30360e1b3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.219143 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8fa1328-3653-44b4-803e-e9c41249bc4f-config\") pod \"kube-apiserver-operator-766d6c64bb-589pg\" (UID: \"d8fa1328-3653-44b4-803e-e9c41249bc4f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-589pg"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.219660 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/362d5a3b-2574-499c-8142-0f3e4369f573-etcd-service-ca\") pod \"etcd-operator-b45778765-qskjn\" (UID: \"362d5a3b-2574-499c-8142-0f3e4369f573\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qskjn"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.219938 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/891a06ac-8101-4fab-a947-2adf9d8eeb7f-config\") pod \"machine-api-operator-5694c8668f-9lj8m\" (UID: \"891a06ac-8101-4fab-a947-2adf9d8eeb7f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9lj8m"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.220329 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-oauth-serving-cert\") pod \"console-f9d7485db-zb8r7\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " pod="openshift-console/console-f9d7485db-zb8r7"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.221488 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4692b73d-a699-4ef5-82e8-cac30360e1b3-serving-cert\") pod \"apiserver-7bbb656c7d-d4nr2\" (UID: \"4692b73d-a699-4ef5-82e8-cac30360e1b3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.221723 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/8a02be23-a9cd-4ea6-9b6f-9e383856cb1d-signing-key\") pod \"service-ca-9c57cc56f-r9h7q\" (UID: \"8a02be23-a9cd-4ea6-9b6f-9e383856cb1d\") " pod="openshift-service-ca/service-ca-9c57cc56f-r9h7q"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.222156 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/71224ef1-9751-49f0-89d6-18b5225f97cb-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.222208 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a612c2a1-ac77-40f9-8cd9-18cf9f68b963-metrics-tls\") pod \"dns-operator-744455d44c-7t8bh\" (UID: \"a612c2a1-ac77-40f9-8cd9-18cf9f68b963\") " pod="openshift-dns-operator/dns-operator-744455d44c-7t8bh"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.222236 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h24l8\" (UniqueName: \"kubernetes.io/projected/da0816a7-7a6a-40ac-a63a-3c26278426d8-kube-api-access-h24l8\") pod \"openshift-controller-manager-operator-756b6f6bc6-zsj6v\" (UID: \"da0816a7-7a6a-40ac-a63a-3c26278426d8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zsj6v"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.222354 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.222585 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/71224ef1-9751-49f0-89d6-18b5225f97cb-registry-certificates\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.222677 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d87a8800-e3bd-4e0b-89c4-fe73193110fb-encryption-config\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.222710 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87967d07-5c59-4d94-a9c4-1a9f1058e0a2-config\") pod \"machine-approver-56656f9798-4bdzq\" (UID: \"87967d07-5c59-4d94-a9c4-1a9f1058e0a2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4bdzq"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.222778 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0382f04b-cd24-421f-8bad-f147dc41bf91-config\") pod \"openshift-apiserver-operator-796bbdcf4f-j5lpd\" (UID: \"0382f04b-cd24-421f-8bad-f147dc41bf91\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j5lpd"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.222809 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zb4mz\" (UniqueName: \"kubernetes.io/projected/891a06ac-8101-4fab-a947-2adf9d8eeb7f-kube-api-access-zb4mz\") pod \"machine-api-operator-5694c8668f-9lj8m\" (UID: \"891a06ac-8101-4fab-a947-2adf9d8eeb7f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9lj8m"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.222838 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-trusted-ca-bundle\") pod \"console-f9d7485db-zb8r7\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " pod="openshift-console/console-f9d7485db-zb8r7"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.222974 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4czl\" (UniqueName: \"kubernetes.io/projected/fdf64638-6ce1-424a-af26-a49e98a29582-kube-api-access-d4czl\") pod \"ingress-operator-5b745b69d9-jh844\" (UID: \"fdf64638-6ce1-424a-af26-a49e98a29582\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jh844"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.223008 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.223041 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0382f04b-cd24-421f-8bad-f147dc41bf91-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-j5lpd\" (UID: \"0382f04b-cd24-421f-8bad-f147dc41bf91\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j5lpd"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.223083 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvlpd\" (UniqueName: \"kubernetes.io/projected/ffde146e-eb04-4056-acb2-febc2da78e46-kube-api-access-kvlpd\") pod \"downloads-7954f5f757-rbbsb\" (UID: \"ffde146e-eb04-4056-acb2-febc2da78e46\") " pod="openshift-console/downloads-7954f5f757-rbbsb"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.223170 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d87a8800-e3bd-4e0b-89c4-fe73193110fb-trusted-ca-bundle\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.223234 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/d87a8800-e3bd-4e0b-89c4-fe73193110fb-image-import-ca\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.223258 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.223299 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7acc3cd6-974f-4584-ac4d-d0e443052796-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-htv8l\" (UID: \"7acc3cd6-974f-4584-ac4d-d0e443052796\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-htv8l"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.223335 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a905c75c-cff5-4137-b8ee-212bfc0334ff-config\") pod \"console-operator-58897d9998-mp7c5\" (UID: \"a905c75c-cff5-4137-b8ee-212bfc0334ff\") " pod="openshift-console-operator/console-operator-58897d9998-mp7c5"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.223362 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ld225\" (UniqueName: \"kubernetes.io/projected/e68d4dc9-2488-4666-897a-f2ce27934d81-kube-api-access-ld225\") pod \"csi-hostpathplugin-fnhqw\" (UID: \"e68d4dc9-2488-4666-897a-f2ce27934d81\") " pod="hostpath-provisioner/csi-hostpathplugin-fnhqw"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.223397 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da0816a7-7a6a-40ac-a63a-3c26278426d8-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-zsj6v\" (UID: \"da0816a7-7a6a-40ac-a63a-3c26278426d8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zsj6v"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.223425 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/284f860e-20fc-48b2-91f2-a3e6fcb6b2c7-webhook-cert\") pod \"packageserver-d55dfcdfc-tn7k5\" (UID: \"284f860e-20fc-48b2-91f2-a3e6fcb6b2c7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tn7k5"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.223453 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cbd5e8f-ec51-4c95-a2e0-43b06373d323-serving-cert\") pod \"service-ca-operator-777779d784-pw7gj\" (UID: \"8cbd5e8f-ec51-4c95-a2e0-43b06373d323\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-pw7gj"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.223504 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.225020 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/7102c26c-508b-485c-8a0e-8d35333641ce-node-bootstrap-token\") pod \"machine-config-server-zq9dg\" (UID: \"7102c26c-508b-485c-8a0e-8d35333641ce\") " pod="openshift-machine-config-operator/machine-config-server-zq9dg"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.225192 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.225456 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7acc3cd6-974f-4584-ac4d-d0e443052796-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-htv8l\" (UID: \"7acc3cd6-974f-4584-ac4d-d0e443052796\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-htv8l"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.225496 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d87a8800-e3bd-4e0b-89c4-fe73193110fb-trusted-ca-bundle\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.225509 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0382f04b-cd24-421f-8bad-f147dc41bf91-config\") pod \"openshift-apiserver-operator-796bbdcf4f-j5lpd\" (UID: \"0382f04b-cd24-421f-8bad-f147dc41bf91\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j5lpd"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.225639 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sn5g5\" (UniqueName: \"kubernetes.io/projected/d87a8800-e3bd-4e0b-89c4-fe73193110fb-kube-api-access-sn5g5\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.225712 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d8fa1328-3653-44b4-803e-e9c41249bc4f-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-589pg\" (UID: \"d8fa1328-3653-44b4-803e-e9c41249bc4f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-589pg"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.225907 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/284f860e-20fc-48b2-91f2-a3e6fcb6b2c7-apiservice-cert\") pod \"packageserver-d55dfcdfc-tn7k5\" (UID: \"284f860e-20fc-48b2-91f2-a3e6fcb6b2c7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tn7k5"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.225986 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/362d5a3b-2574-499c-8142-0f3e4369f573-etcd-client\") pod \"etcd-operator-b45778765-qskjn\" (UID: \"362d5a3b-2574-499c-8142-0f3e4369f573\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qskjn"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.226039 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/71224ef1-9751-49f0-89d6-18b5225f97cb-bound-sa-token\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.226093 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96c3620f-2835-41cc-8152-40ff0eb6db8c-serving-cert\") pod \"authentication-operator-69f744f599-hvqs4\" (UID: \"96c3620f-2835-41cc-8152-40ff0eb6db8c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hvqs4"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.226145 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/362d5a3b-2574-499c-8142-0f3e4369f573-serving-cert\") pod \"etcd-operator-b45778765-qskjn\" (UID: \"362d5a3b-2574-499c-8142-0f3e4369f573\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qskjn"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.226175 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0421da7c-2216-4c4a-8422-2e3391a256b2-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-2fvvn\" (UID: \"0421da7c-2216-4c4a-8422-2e3391a256b2\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2fvvn"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.226234 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-console-oauth-config\") pod \"console-f9d7485db-zb8r7\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " pod="openshift-console/console-f9d7485db-zb8r7"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.226259 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7f5s\" (UniqueName: \"kubernetes.io/projected/8a02be23-a9cd-4ea6-9b6f-9e383856cb1d-kube-api-access-q7f5s\") pod \"service-ca-9c57cc56f-r9h7q\" (UID: \"8a02be23-a9cd-4ea6-9b6f-9e383856cb1d\") " pod="openshift-service-ca/service-ca-9c57cc56f-r9h7q"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.226483 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-console-serving-cert\") pod \"console-f9d7485db-zb8r7\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " pod="openshift-console/console-f9d7485db-zb8r7"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.226622 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d6391e1f-a04c-44ae-9528-b80530aa1d4a-srv-cert\") pod \"catalog-operator-68c6474976-7vcg4\" (UID: \"d6391e1f-a04c-44ae-9528-b80530aa1d4a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7vcg4"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.227120 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4692b73d-a699-4ef5-82e8-cac30360e1b3-audit-policies\") pod \"apiserver-7bbb656c7d-d4nr2\" (UID: \"4692b73d-a699-4ef5-82e8-cac30360e1b3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.227139 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d87a8800-e3bd-4e0b-89c4-fe73193110fb-serving-cert\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.227320 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da0816a7-7a6a-40ac-a63a-3c26278426d8-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-zsj6v\" (UID: \"da0816a7-7a6a-40ac-a63a-3c26278426d8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zsj6v"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.226246 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87967d07-5c59-4d94-a9c4-1a9f1058e0a2-config\") pod \"machine-approver-56656f9798-4bdzq\" (UID: \"87967d07-5c59-4d94-a9c4-1a9f1058e0a2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4bdzq"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.227352 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/7102c26c-508b-485c-8a0e-8d35333641ce-certs\") pod \"machine-config-server-zq9dg\" (UID: \"7102c26c-508b-485c-8a0e-8d35333641ce\") " pod="openshift-machine-config-operator/machine-config-server-zq9dg"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.227536 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.227587 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/d87a8800-e3bd-4e0b-89c4-fe73193110fb-image-import-ca\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.227639 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.227696 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0421da7c-2216-4c4a-8422-2e3391a256b2-config\") pod \"kube-controller-manager-operator-78b949d7b-2fvvn\" (UID: \"0421da7c-2216-4c4a-8422-2e3391a256b2\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2fvvn"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.227994 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-trusted-ca-bundle\") pod \"console-f9d7485db-zb8r7\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " pod="openshift-console/console-f9d7485db-zb8r7"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.228137 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f9f44648-d5f1-49ee-a394-115e43c97fc9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nc87t\" (UID: \"f9f44648-d5f1-49ee-a394-115e43c97fc9\") " pod="openshift-marketplace/marketplace-operator-79b997595-nc87t"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.228240 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/237e4a75-4edd-4622-87f4-03a1f620649d-serving-cert\") pod \"openshift-config-operator-7777fb866f-pczb2\" (UID: \"237e4a75-4edd-4622-87f4-03a1f620649d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pczb2"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.228300 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-console-config\") pod \"console-f9d7485db-zb8r7\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " pod="openshift-console/console-f9d7485db-zb8r7"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.228340 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c25af517-aa72-4f6b-9271-e936daa641d7-metrics-tls\") pod \"dns-default-5t5n2\" (UID: \"c25af517-aa72-4f6b-9271-e936daa641d7\") " pod="openshift-dns/dns-default-5t5n2"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.228382 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7d7gn\" (UniqueName: \"kubernetes.io/projected/2b4c7f51-5133-456b-9923-61854cdcf098-kube-api-access-7d7gn\") pod \"migrator-59844c95c7-xrctg\" (UID: \"2b4c7f51-5133-456b-9923-61854cdcf098\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xrctg"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.228546 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4692b73d-a699-4ef5-82e8-cac30360e1b3-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-d4nr2\" (UID: \"4692b73d-a699-4ef5-82e8-cac30360e1b3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.228622 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-service-ca\") pod \"console-f9d7485db-zb8r7\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " pod="openshift-console/console-f9d7485db-zb8r7"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.228660 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.228747 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/87967d07-5c59-4d94-a9c4-1a9f1058e0a2-machine-approver-tls\") pod \"machine-approver-56656f9798-4bdzq\" (UID: \"87967d07-5c59-4d94-a9c4-1a9f1058e0a2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4bdzq"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.228861 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4692b73d-a699-4ef5-82e8-cac30360e1b3-encryption-config\") pod \"apiserver-7bbb656c7d-d4nr2\" (UID: \"4692b73d-a699-4ef5-82e8-cac30360e1b3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.228960 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/96c3620f-2835-41cc-8152-40ff0eb6db8c-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-hvqs4\" (UID: \"96c3620f-2835-41cc-8152-40ff0eb6db8c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hvqs4"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.229141 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4692b73d-a699-4ef5-82e8-cac30360e1b3-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-d4nr2\" (UID: \"4692b73d-a699-4ef5-82e8-cac30360e1b3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.229251 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d87a8800-e3bd-4e0b-89c4-fe73193110fb-etcd-client\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.229398 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/cf41adb4-ca77-4997-a2db-0e45bbe317c3-default-certificate\") pod \"router-default-5444994796-qrgwq\" (UID: \"cf41adb4-ca77-4997-a2db-0e45bbe317c3\") " pod="openshift-ingress/router-default-5444994796-qrgwq"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.229488 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-console-config\") pod \"console-f9d7485db-zb8r7\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " pod="openshift-console/console-f9d7485db-zb8r7"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.229654 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/811f6598-f603-4a15-8dec-add067d82d5c-audit-dir\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.229980 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/87967d07-5c59-4d94-a9c4-1a9f1058e0a2-auth-proxy-config\") pod \"machine-approver-56656f9798-4bdzq\" (UID: \"87967d07-5c59-4d94-a9c4-1a9f1058e0a2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4bdzq"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.230010 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/96c3620f-2835-41cc-8152-40ff0eb6db8c-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-hvqs4\" (UID: \"96c3620f-2835-41cc-8152-40ff0eb6db8c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hvqs4"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.230009 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/811f6598-f603-4a15-8dec-add067d82d5c-audit-dir\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.230114 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/8a02be23-a9cd-4ea6-9b6f-9e383856cb1d-signing-cabundle\") pod \"service-ca-9c57cc56f-r9h7q\" (UID: \"8a02be23-a9cd-4ea6-9b6f-9e383856cb1d\") " pod="openshift-service-ca/service-ca-9c57cc56f-r9h7q"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.230428 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.230909 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/87967d07-5c59-4d94-a9c4-1a9f1058e0a2-auth-proxy-config\") pod \"machine-approver-56656f9798-4bdzq\" (UID: \"87967d07-5c59-4d94-a9c4-1a9f1058e0a2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4bdzq"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.231264 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/8a02be23-a9cd-4ea6-9b6f-9e383856cb1d-signing-cabundle\") pod \"service-ca-9c57cc56f-r9h7q\" (UID: \"8a02be23-a9cd-4ea6-9b6f-9e383856cb1d\") " pod="openshift-service-ca/service-ca-9c57cc56f-r9h7q"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.231516 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.231677 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0382f04b-cd24-421f-8bad-f147dc41bf91-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-j5lpd\" (UID: \"0382f04b-cd24-421f-8bad-f147dc41bf91\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j5lpd"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.231675 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d87a8800-e3bd-4e0b-89c4-fe73193110fb-encryption-config\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.231679 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/362d5a3b-2574-499c-8142-0f3e4369f573-serving-cert\") pod \"etcd-operator-b45778765-qskjn\" (UID: \"362d5a3b-2574-499c-8142-0f3e4369f573\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qskjn"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.232105 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a5f30d9-fc2d-47dc-8662-d649023f9521-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nzmkn\" (UID: \"3a5f30d9-fc2d-47dc-8662-d649023f9521\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nzmkn"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.232230 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4692b73d-a699-4ef5-82e8-cac30360e1b3-serving-cert\") pod \"apiserver-7bbb656c7d-d4nr2\" (UID: \"4692b73d-a699-4ef5-82e8-cac30360e1b3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.232230 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d6391e1f-a04c-44ae-9528-b80530aa1d4a-profile-collector-cert\") pod \"catalog-operator-68c6474976-7vcg4\" (UID: \"d6391e1f-a04c-44ae-9528-b80530aa1d4a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7vcg4"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.232525 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4692b73d-a699-4ef5-82e8-cac30360e1b3-encryption-config\") pod \"apiserver-7bbb656c7d-d4nr2\" (UID: \"4692b73d-a699-4ef5-82e8-cac30360e1b3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.232810 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/8a02be23-a9cd-4ea6-9b6f-9e383856cb1d-signing-key\") pod \"service-ca-9c57cc56f-r9h7q\" (UID: \"8a02be23-a9cd-4ea6-9b6f-9e383856cb1d\") " pod="openshift-service-ca/service-ca-9c57cc56f-r9h7q"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.232948 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d87a8800-e3bd-4e0b-89c4-fe73193110fb-etcd-client\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.233566 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/237e4a75-4edd-4622-87f4-03a1f620649d-serving-cert\") pod \"openshift-config-operator-7777fb866f-pczb2\" (UID: \"237e4a75-4edd-4622-87f4-03a1f620649d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pczb2"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.233848 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-console-oauth-config\") pod \"console-f9d7485db-zb8r7\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " pod="openshift-console/console-f9d7485db-zb8r7"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.235267 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/362d5a3b-2574-499c-8142-0f3e4369f573-etcd-client\") pod \"etcd-operator-b45778765-qskjn\" (UID: \"362d5a3b-2574-499c-8142-0f3e4369f573\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qskjn"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.237228 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.249657 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7jnz\" (UniqueName: \"kubernetes.io/projected/d6391e1f-a04c-44ae-9528-b80530aa1d4a-kube-api-access-w7jnz\") pod \"catalog-operator-68c6474976-7vcg4\" (UID: \"d6391e1f-a04c-44ae-9528-b80530aa1d4a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7vcg4"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.270059 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsc77\" (UniqueName: \"kubernetes.io/projected/87967d07-5c59-4d94-a9c4-1a9f1058e0a2-kube-api-access-xsc77\") pod \"machine-approver-56656f9798-4bdzq\" (UID: \"87967d07-5c59-4d94-a9c4-1a9f1058e0a2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4bdzq"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.287232 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cn9fh\" (UniqueName: \"kubernetes.io/projected/237e4a75-4edd-4622-87f4-03a1f620649d-kube-api-access-cn9fh\") pod \"openshift-config-operator-7777fb866f-pczb2\" (UID: \"237e4a75-4edd-4622-87f4-03a1f620649d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-pczb2"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.328472 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qc27f\" (UniqueName: \"kubernetes.io/projected/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6-kube-api-access-qc27f\") pod \"route-controller-manager-6576b87f9c-tq2cv\" (UID: \"6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331141 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 15:01:00 crc kubenswrapper[4840]: E1205 15:01:00.331235 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:00.83121453 +0000 UTC m=+139.172277144 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331313 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cf41adb4-ca77-4997-a2db-0e45bbe317c3-metrics-certs\") pod \"router-default-5444994796-qrgwq\" (UID: \"cf41adb4-ca77-4997-a2db-0e45bbe317c3\") " pod="openshift-ingress/router-default-5444994796-qrgwq"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331337 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/e68d4dc9-2488-4666-897a-f2ce27934d81-mountpoint-dir\") pod \"csi-hostpathplugin-fnhqw\" (UID: \"e68d4dc9-2488-4666-897a-f2ce27934d81\") " pod="hostpath-provisioner/csi-hostpathplugin-fnhqw"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331354 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fdf64638-6ce1-424a-af26-a49e98a29582-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jh844\" (UID: \"fdf64638-6ce1-424a-af26-a49e98a29582\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jh844"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331372 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/70be4dce-113d-4659-b5dc-55c3c724de12-config-volume\") pod \"collect-profiles-29415780-k44mq\" (UID: \"70be4dce-113d-4659-b5dc-55c3c724de12\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415780-k44mq"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331399 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7r2gb\" (UniqueName: \"kubernetes.io/projected/284f860e-20fc-48b2-91f2-a3e6fcb6b2c7-kube-api-access-7r2gb\") pod \"packageserver-d55dfcdfc-tn7k5\" (UID: \"284f860e-20fc-48b2-91f2-a3e6fcb6b2c7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tn7k5"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331437 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cf41adb4-ca77-4997-a2db-0e45bbe317c3-service-ca-bundle\") pod \"router-default-5444994796-qrgwq\" (UID: \"cf41adb4-ca77-4997-a2db-0e45bbe317c3\") " pod="openshift-ingress/router-default-5444994796-qrgwq"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331441 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/e68d4dc9-2488-4666-897a-f2ce27934d81-mountpoint-dir\") pod \"csi-hostpathplugin-fnhqw\" (UID: \"e68d4dc9-2488-4666-897a-f2ce27934d81\") " pod="hostpath-provisioner/csi-hostpathplugin-fnhqw"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331518 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4czl\" (UniqueName: \"kubernetes.io/projected/fdf64638-6ce1-424a-af26-a49e98a29582-kube-api-access-d4czl\") pod \"ingress-operator-5b745b69d9-jh844\" (UID: \"fdf64638-6ce1-424a-af26-a49e98a29582\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jh844"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331550 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a905c75c-cff5-4137-b8ee-212bfc0334ff-config\") pod \"console-operator-58897d9998-mp7c5\" (UID: \"a905c75c-cff5-4137-b8ee-212bfc0334ff\") " pod="openshift-console-operator/console-operator-58897d9998-mp7c5"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331571 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ld225\" (UniqueName: \"kubernetes.io/projected/e68d4dc9-2488-4666-897a-f2ce27934d81-kube-api-access-ld225\") pod \"csi-hostpathplugin-fnhqw\" (UID: \"e68d4dc9-2488-4666-897a-f2ce27934d81\") " pod="hostpath-provisioner/csi-hostpathplugin-fnhqw"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331590 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/284f860e-20fc-48b2-91f2-a3e6fcb6b2c7-webhook-cert\") pod \"packageserver-d55dfcdfc-tn7k5\" (UID: \"284f860e-20fc-48b2-91f2-a3e6fcb6b2c7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tn7k5"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331611 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cbd5e8f-ec51-4c95-a2e0-43b06373d323-serving-cert\") pod \"service-ca-operator-777779d784-pw7gj\" (UID: \"8cbd5e8f-ec51-4c95-a2e0-43b06373d323\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-pw7gj"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331645 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/284f860e-20fc-48b2-91f2-a3e6fcb6b2c7-apiservice-cert\") pod \"packageserver-d55dfcdfc-tn7k5\" (UID: \"284f860e-20fc-48b2-91f2-a3e6fcb6b2c7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tn7k5"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331704 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c25af517-aa72-4f6b-9271-e936daa641d7-metrics-tls\") pod \"dns-default-5t5n2\" (UID: \"c25af517-aa72-4f6b-9271-e936daa641d7\") " pod="openshift-dns/dns-default-5t5n2"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331721 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7d7gn\" (UniqueName: \"kubernetes.io/projected/2b4c7f51-5133-456b-9923-61854cdcf098-kube-api-access-7d7gn\") pod \"migrator-59844c95c7-xrctg\" (UID: \"2b4c7f51-5133-456b-9923-61854cdcf098\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xrctg"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331749 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/cf41adb4-ca77-4997-a2db-0e45bbe317c3-default-certificate\") pod \"router-default-5444994796-qrgwq\" (UID: \"cf41adb4-ca77-4997-a2db-0e45bbe317c3\") " pod="openshift-ingress/router-default-5444994796-qrgwq"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331772 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9k58\" (UniqueName: \"kubernetes.io/projected/c1cd36b3-0ae5-4d77-a972-4521a0bed069-kube-api-access-s9k58\") pod \"package-server-manager-789f6589d5-8vzfs\" (UID: \"c1cd36b3-0ae5-4d77-a972-4521a0bed069\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8vzfs"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331788 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/e68d4dc9-2488-4666-897a-f2ce27934d81-socket-dir\") pod \"csi-hostpathplugin-fnhqw\" (UID: \"e68d4dc9-2488-4666-897a-f2ce27934d81\") " pod="hostpath-provisioner/csi-hostpathplugin-fnhqw"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331809 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e281c913-e265-4ce8-af6a-11f255f6faf1-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-x52gm\" (UID: \"e281c913-e265-4ce8-af6a-11f255f6faf1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x52gm"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331831 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwxj4\" (UniqueName: \"kubernetes.io/projected/e281c913-e265-4ce8-af6a-11f255f6faf1-kube-api-access-bwxj4\") pod \"control-plane-machine-set-operator-78cbb6b69f-x52gm\" (UID: \"e281c913-e265-4ce8-af6a-11f255f6faf1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x52gm"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331852 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/fdf64638-6ce1-424a-af26-a49e98a29582-metrics-tls\") pod \"ingress-operator-5b745b69d9-jh844\" (UID: \"fdf64638-6ce1-424a-af26-a49e98a29582\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jh844"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331911 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fdf64638-6ce1-424a-af26-a49e98a29582-trusted-ca\") pod \"ingress-operator-5b745b69d9-jh844\" (UID: \"fdf64638-6ce1-424a-af26-a49e98a29582\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jh844"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331935 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1d0ca542-fc9c-4849-ba80-c7fd15606fb5-auth-proxy-config\") pod \"machine-config-operator-74547568cd-69w55\" (UID: \"1d0ca542-fc9c-4849-ba80-c7fd15606fb5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69w55"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331955 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cbd5e8f-ec51-4c95-a2e0-43b06373d323-config\") pod \"service-ca-operator-777779d784-pw7gj\" (UID: \"8cbd5e8f-ec51-4c95-a2e0-43b06373d323\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-pw7gj"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.331983 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/284f860e-20fc-48b2-91f2-a3e6fcb6b2c7-tmpfs\") pod \"packageserver-d55dfcdfc-tn7k5\" (UID: \"284f860e-20fc-48b2-91f2-a3e6fcb6b2c7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tn7k5"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332006 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l75sx\" (UniqueName: \"kubernetes.io/projected/478697e8-c090-46b8-adc9-88d2592e75a8-kube-api-access-l75sx\") pod \"multus-admission-controller-857f4d67dd-7z87w\" (UID: \"478697e8-c090-46b8-adc9-88d2592e75a8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7z87w"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332026 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/e68d4dc9-2488-4666-897a-f2ce27934d81-plugins-dir\") pod \"csi-hostpathplugin-fnhqw\" (UID: \"e68d4dc9-2488-4666-897a-f2ce27934d81\") " pod="hostpath-provisioner/csi-hostpathplugin-fnhqw"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332064 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a905c75c-cff5-4137-b8ee-212bfc0334ff-trusted-ca\") pod \"console-operator-58897d9998-mp7c5\" (UID: \"a905c75c-cff5-4137-b8ee-212bfc0334ff\") " pod="openshift-console-operator/console-operator-58897d9998-mp7c5"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332095 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-658zl\" (UniqueName: \"kubernetes.io/projected/c25af517-aa72-4f6b-9271-e936daa641d7-kube-api-access-658zl\") pod \"dns-default-5t5n2\" (UID: \"c25af517-aa72-4f6b-9271-e936daa641d7\") " pod="openshift-dns/dns-default-5t5n2"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332139 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f1e52a75-bace-452e-989d-3907666cdd11-srv-cert\") pod \"olm-operator-6b444d44fb-7bdp5\" (UID: \"f1e52a75-bace-452e-989d-3907666cdd11\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7bdp5"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332162 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2jxq\" (UniqueName: \"kubernetes.io/projected/a905c75c-cff5-4137-b8ee-212bfc0334ff-kube-api-access-v2jxq\") pod \"console-operator-58897d9998-mp7c5\" (UID: \"a905c75c-cff5-4137-b8ee-212bfc0334ff\") " pod="openshift-console-operator/console-operator-58897d9998-mp7c5"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332183 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hw5zt\" (UniqueName: \"kubernetes.io/projected/cf41adb4-ca77-4997-a2db-0e45bbe317c3-kube-api-access-hw5zt\") pod \"router-default-5444994796-qrgwq\" (UID: \"cf41adb4-ca77-4997-a2db-0e45bbe317c3\") " pod="openshift-ingress/router-default-5444994796-qrgwq"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332229 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/478697e8-c090-46b8-adc9-88d2592e75a8-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-7z87w\" (UID: \"478697e8-c090-46b8-adc9-88d2592e75a8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7z87w"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332253 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/cf41adb4-ca77-4997-a2db-0e45bbe317c3-stats-auth\") pod \"router-default-5444994796-qrgwq\" (UID: \"cf41adb4-ca77-4997-a2db-0e45bbe317c3\") " pod="openshift-ingress/router-default-5444994796-qrgwq"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332275 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cf41adb4-ca77-4997-a2db-0e45bbe317c3-service-ca-bundle\") pod \"router-default-5444994796-qrgwq\" (UID: \"cf41adb4-ca77-4997-a2db-0e45bbe317c3\") " pod="openshift-ingress/router-default-5444994796-qrgwq"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332287 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1d0ca542-fc9c-4849-ba80-c7fd15606fb5-images\") pod \"machine-config-operator-74547568cd-69w55\" (UID: \"1d0ca542-fc9c-4849-ba80-c7fd15606fb5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69w55"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332274 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/70be4dce-113d-4659-b5dc-55c3c724de12-config-volume\") pod \"collect-profiles-29415780-k44mq\" (UID: \"70be4dce-113d-4659-b5dc-55c3c724de12\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415780-k44mq"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332311 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0ccd57b5-93f1-4bf8-adf6-4c0b2c400e03-cert\") pod \"ingress-canary-48665\" (UID: \"0ccd57b5-93f1-4bf8-adf6-4c0b2c400e03\") " pod="openshift-ingress-canary/ingress-canary-48665"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332337 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/70be4dce-113d-4659-b5dc-55c3c724de12-secret-volume\") pod \"collect-profiles-29415780-k44mq\" (UID: \"70be4dce-113d-4659-b5dc-55c3c724de12\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415780-k44mq"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332363 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332391 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99lnh\" (UniqueName: \"kubernetes.io/projected/f1e52a75-bace-452e-989d-3907666cdd11-kube-api-access-99lnh\") pod \"olm-operator-6b444d44fb-7bdp5\" (UID: \"f1e52a75-bace-452e-989d-3907666cdd11\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7bdp5"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332413 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/e68d4dc9-2488-4666-897a-f2ce27934d81-csi-data-dir\") pod \"csi-hostpathplugin-fnhqw\" (UID: \"e68d4dc9-2488-4666-897a-f2ce27934d81\") " pod="hostpath-provisioner/csi-hostpathplugin-fnhqw"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332442 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gskd\" (UniqueName: \"kubernetes.io/projected/8cbd5e8f-ec51-4c95-a2e0-43b06373d323-kube-api-access-2gskd\") pod \"service-ca-operator-777779d784-pw7gj\" (UID: \"8cbd5e8f-ec51-4c95-a2e0-43b06373d323\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-pw7gj"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332464 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1d0ca542-fc9c-4849-ba80-c7fd15606fb5-proxy-tls\") pod \"machine-config-operator-74547568cd-69w55\" (UID: \"1d0ca542-fc9c-4849-ba80-c7fd15606fb5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69w55"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332487 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/c1cd36b3-0ae5-4d77-a972-4521a0bed069-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-8vzfs\" (UID: \"c1cd36b3-0ae5-4d77-a972-4521a0bed069\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8vzfs"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332605 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f1e52a75-bace-452e-989d-3907666cdd11-profile-collector-cert\") pod \"olm-operator-6b444d44fb-7bdp5\" (UID: \"f1e52a75-bace-452e-989d-3907666cdd11\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7bdp5"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332657 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ts4s2\" (UniqueName: \"kubernetes.io/projected/1d0ca542-fc9c-4849-ba80-c7fd15606fb5-kube-api-access-ts4s2\") pod \"machine-config-operator-74547568cd-69w55\" (UID: \"1d0ca542-fc9c-4849-ba80-c7fd15606fb5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69w55"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332710 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c25af517-aa72-4f6b-9271-e936daa641d7-config-volume\") pod \"dns-default-5t5n2\" (UID: \"c25af517-aa72-4f6b-9271-e936daa641d7\") " pod="openshift-dns/dns-default-5t5n2"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332734 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6xgx\" (UniqueName: \"kubernetes.io/projected/70be4dce-113d-4659-b5dc-55c3c724de12-kube-api-access-w6xgx\") pod \"collect-profiles-29415780-k44mq\" (UID: \"70be4dce-113d-4659-b5dc-55c3c724de12\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415780-k44mq"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332758 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qpmkb\" (UniqueName: \"kubernetes.io/projected/0ccd57b5-93f1-4bf8-adf6-4c0b2c400e03-kube-api-access-qpmkb\") pod \"ingress-canary-48665\" (UID: \"0ccd57b5-93f1-4bf8-adf6-4c0b2c400e03\") " pod="openshift-ingress-canary/ingress-canary-48665"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332791 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a905c75c-cff5-4137-b8ee-212bfc0334ff-serving-cert\") pod \"console-operator-58897d9998-mp7c5\" (UID: \"a905c75c-cff5-4137-b8ee-212bfc0334ff\") " pod="openshift-console-operator/console-operator-58897d9998-mp7c5"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.332813 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/e68d4dc9-2488-4666-897a-f2ce27934d81-registration-dir\") pod \"csi-hostpathplugin-fnhqw\" (UID: \"e68d4dc9-2488-4666-897a-f2ce27934d81\") " pod="hostpath-provisioner/csi-hostpathplugin-fnhqw"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.333140 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/e68d4dc9-2488-4666-897a-f2ce27934d81-registration-dir\") pod \"csi-hostpathplugin-fnhqw\" (UID: \"e68d4dc9-2488-4666-897a-f2ce27934d81\") " pod="hostpath-provisioner/csi-hostpathplugin-fnhqw"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.333247 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/e68d4dc9-2488-4666-897a-f2ce27934d81-socket-dir\") pod \"csi-hostpathplugin-fnhqw\" (UID: \"e68d4dc9-2488-4666-897a-f2ce27934d81\") " pod="hostpath-provisioner/csi-hostpathplugin-fnhqw"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.333425 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a905c75c-cff5-4137-b8ee-212bfc0334ff-config\") pod \"console-operator-58897d9998-mp7c5\" (UID: \"a905c75c-cff5-4137-b8ee-212bfc0334ff\") " pod="openshift-console-operator/console-operator-58897d9998-mp7c5"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.333953 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/e68d4dc9-2488-4666-897a-f2ce27934d81-plugins-dir\") pod \"csi-hostpathplugin-fnhqw\" (UID: \"e68d4dc9-2488-4666-897a-f2ce27934d81\") " pod="hostpath-provisioner/csi-hostpathplugin-fnhqw"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.335159 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c25af517-aa72-4f6b-9271-e936daa641d7-config-volume\") pod \"dns-default-5t5n2\" (UID: \"c25af517-aa72-4f6b-9271-e936daa641d7\") " pod="openshift-dns/dns-default-5t5n2"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.335370 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cf41adb4-ca77-4997-a2db-0e45bbe317c3-metrics-certs\") pod \"router-default-5444994796-qrgwq\" (UID: \"cf41adb4-ca77-4997-a2db-0e45bbe317c3\") " pod="openshift-ingress/router-default-5444994796-qrgwq"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.336099 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c25af517-aa72-4f6b-9271-e936daa641d7-metrics-tls\") pod \"dns-default-5t5n2\" (UID: \"c25af517-aa72-4f6b-9271-e936daa641d7\") " pod="openshift-dns/dns-default-5t5n2"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.336131 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cbd5e8f-ec51-4c95-a2e0-43b06373d323-serving-cert\") pod \"service-ca-operator-777779d784-pw7gj\" (UID: \"8cbd5e8f-ec51-4c95-a2e0-43b06373d323\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-pw7gj"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.336214 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/284f860e-20fc-48b2-91f2-a3e6fcb6b2c7-webhook-cert\") pod \"packageserver-d55dfcdfc-tn7k5\" (UID: \"284f860e-20fc-48b2-91f2-a3e6fcb6b2c7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tn7k5"
Dec 05 15:01:00 crc kubenswrapper[4840]: E1205 15:01:00.336513 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:00.83649253 +0000 UTC m=+139.177555234 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.336780 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cbd5e8f-ec51-4c95-a2e0-43b06373d323-config\") pod \"service-ca-operator-777779d784-pw7gj\" (UID: \"8cbd5e8f-ec51-4c95-a2e0-43b06373d323\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-pw7gj"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.337597 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/284f860e-20fc-48b2-91f2-a3e6fcb6b2c7-apiservice-cert\") pod \"packageserver-d55dfcdfc-tn7k5\" (UID: \"284f860e-20fc-48b2-91f2-a3e6fcb6b2c7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tn7k5"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.337611 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e281c913-e265-4ce8-af6a-11f255f6faf1-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-x52gm\" (UID: \"e281c913-e265-4ce8-af6a-11f255f6faf1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x52gm"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.338374 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/cf41adb4-ca77-4997-a2db-0e45bbe317c3-default-certificate\") pod \"router-default-5444994796-qrgwq\" (UID: \"cf41adb4-ca77-4997-a2db-0e45bbe317c3\") " pod="openshift-ingress/router-default-5444994796-qrgwq"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.338627 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/284f860e-20fc-48b2-91f2-a3e6fcb6b2c7-tmpfs\") pod
\"packageserver-d55dfcdfc-tn7k5\" (UID: \"284f860e-20fc-48b2-91f2-a3e6fcb6b2c7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tn7k5" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.338845 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1d0ca542-fc9c-4849-ba80-c7fd15606fb5-proxy-tls\") pod \"machine-config-operator-74547568cd-69w55\" (UID: \"1d0ca542-fc9c-4849-ba80-c7fd15606fb5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69w55" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.339064 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/e68d4dc9-2488-4666-897a-f2ce27934d81-csi-data-dir\") pod \"csi-hostpathplugin-fnhqw\" (UID: \"e68d4dc9-2488-4666-897a-f2ce27934d81\") " pod="hostpath-provisioner/csi-hostpathplugin-fnhqw" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.339212 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f1e52a75-bace-452e-989d-3907666cdd11-profile-collector-cert\") pod \"olm-operator-6b444d44fb-7bdp5\" (UID: \"f1e52a75-bace-452e-989d-3907666cdd11\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7bdp5" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.339539 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1d0ca542-fc9c-4849-ba80-c7fd15606fb5-auth-proxy-config\") pod \"machine-config-operator-74547568cd-69w55\" (UID: \"1d0ca542-fc9c-4849-ba80-c7fd15606fb5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69w55" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.339815 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4bdzq" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.341722 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/c1cd36b3-0ae5-4d77-a972-4521a0bed069-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-8vzfs\" (UID: \"c1cd36b3-0ae5-4d77-a972-4521a0bed069\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8vzfs" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.342069 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/cf41adb4-ca77-4997-a2db-0e45bbe317c3-stats-auth\") pod \"router-default-5444994796-qrgwq\" (UID: \"cf41adb4-ca77-4997-a2db-0e45bbe317c3\") " pod="openshift-ingress/router-default-5444994796-qrgwq" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.343834 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/70be4dce-113d-4659-b5dc-55c3c724de12-secret-volume\") pod \"collect-profiles-29415780-k44mq\" (UID: \"70be4dce-113d-4659-b5dc-55c3c724de12\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415780-k44mq" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.343982 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/478697e8-c090-46b8-adc9-88d2592e75a8-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-7z87w\" (UID: \"478697e8-c090-46b8-adc9-88d2592e75a8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7z87w" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.348702 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f1e52a75-bace-452e-989d-3907666cdd11-srv-cert\") pod \"olm-operator-6b444d44fb-7bdp5\" (UID: \"f1e52a75-bace-452e-989d-3907666cdd11\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7bdp5" Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.351188 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3a5f30d9-fc2d-47dc-8662-d649023f9521-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nzmkn\" (UID: \"3a5f30d9-fc2d-47dc-8662-d649023f9521\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nzmkn" Dec 05 15:01:00 crc kubenswrapper[4840]: W1205 15:01:00.354635 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod87967d07_5c59_4d94_a9c4_1a9f1058e0a2.slice/crio-19c3446359716edc65d27b1e2ac25e892d894ea146b01f8b71dd8bbf06c1ec96 WatchSource:0}: Error finding container 19c3446359716edc65d27b1e2ac25e892d894ea146b01f8b71dd8bbf06c1ec96: Status 404 returned error can't find the container with id 19c3446359716edc65d27b1e2ac25e892d894ea146b01f8b71dd8bbf06c1ec96 Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.369660 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ngh6l\" (UniqueName: \"kubernetes.io/projected/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-kube-api-access-ngh6l\") pod \"console-f9d7485db-zb8r7\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " pod="openshift-console/console-f9d7485db-zb8r7" 
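[Editor's sketch] The MountVolume.MountDevice failure at 15:01:00.336513 above is retried on a timer rather than immediately: nestedpendingoperations.go refuses retries until the printed deadline, here 500ms out. As a minimal sketch of that schedule, assuming the upstream kubelet defaults of a 500ms initial delay doubling up to a cap of 2m2s (the constants below are assumptions taken from kubelet's exponentialbackoff package, not read from this log):

// backoffsketch.go -- illustrative only: prints the retry delays the volume
// manager would use for a repeatedly failing mount operation.
package main

import (
	"fmt"
	"time"
)

const (
	initialDelay = 500 * time.Millisecond        // matches "durationBeforeRetry 500ms" in the log
	factor       = 2                             // assumed doubling factor
	maxDelay     = 2*time.Minute + 2*time.Second // assumed cap
)

func main() {
	d := initialDelay
	for attempt := 1; attempt <= 10; attempt++ {
		fmt.Printf("attempt %2d: wait %v\n", attempt, d)
		d *= factor
		if d > maxDelay {
			d = maxDelay
		}
	}
}

Note that every retry in this capture still prints 500ms; the retries only stop once the missing driver registers, as the later entries show.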
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.393046 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxlfl\" (UniqueName: \"kubernetes.io/projected/811f6598-f603-4a15-8dec-add067d82d5c-kube-api-access-zxlfl\") pod \"oauth-openshift-558db77b4-svf7z\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") " pod="openshift-authentication/oauth-openshift-558db77b4-svf7z"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.393685 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7vcg4"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.411501 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdr79\" (UniqueName: \"kubernetes.io/projected/362d5a3b-2574-499c-8142-0f3e4369f573-kube-api-access-fdr79\") pod \"etcd-operator-b45778765-qskjn\" (UID: \"362d5a3b-2574-499c-8142-0f3e4369f573\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qskjn"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.434403 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 15:01:00 crc kubenswrapper[4840]: E1205 15:01:00.435506 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:00.935487919 +0000 UTC m=+139.276550533 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.445317 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-svf7z"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.447468 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xwjm\" (UniqueName: \"kubernetes.io/projected/3d9897dc-10eb-4947-b5ce-63362338dfd1-kube-api-access-6xwjm\") pod \"machine-config-controller-84d6567774-fjcvg\" (UID: \"3d9897dc-10eb-4947-b5ce-63362338dfd1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fjcvg"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.471675 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pczb2"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.489353 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqmpc\" (UniqueName: \"kubernetes.io/projected/a612c2a1-ac77-40f9-8cd9-18cf9f68b963-kube-api-access-jqmpc\") pod \"dns-operator-744455d44c-7t8bh\" (UID: \"a612c2a1-ac77-40f9-8cd9-18cf9f68b963\") " pod="openshift-dns-operator/dns-operator-744455d44c-7t8bh"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.490660 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/fdf64638-6ce1-424a-af26-a49e98a29582-metrics-tls\") pod \"ingress-operator-5b745b69d9-jh844\" (UID: \"fdf64638-6ce1-424a-af26-a49e98a29582\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jh844"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.490660 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/96c3620f-2835-41cc-8152-40ff0eb6db8c-service-ca-bundle\") pod \"authentication-operator-69f744f599-hvqs4\" (UID: \"96c3620f-2835-41cc-8152-40ff0eb6db8c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hvqs4"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.492245 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-service-ca\") pod \"console-f9d7485db-zb8r7\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " pod="openshift-console/console-f9d7485db-zb8r7"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.492547 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f9f44648-d5f1-49ee-a394-115e43c97fc9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nc87t\" (UID: \"f9f44648-d5f1-49ee-a394-115e43c97fc9\") " pod="openshift-marketplace/marketplace-operator-79b997595-nc87t"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.493757 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fdf64638-6ce1-424a-af26-a49e98a29582-trusted-ca\") pod \"ingress-operator-5b745b69d9-jh844\" (UID: \"fdf64638-6ce1-424a-af26-a49e98a29582\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jh844"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.493783 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/71224ef1-9751-49f0-89d6-18b5225f97cb-registry-certificates\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.494776 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0421da7c-2216-4c4a-8422-2e3391a256b2-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-2fvvn\" (UID: \"0421da7c-2216-4c4a-8422-2e3391a256b2\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2fvvn"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.495089 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a905c75c-cff5-4137-b8ee-212bfc0334ff-trusted-ca\") pod \"console-operator-58897d9998-mp7c5\" (UID: \"a905c75c-cff5-4137-b8ee-212bfc0334ff\") " pod="openshift-console-operator/console-operator-58897d9998-mp7c5"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.495055 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0421da7c-2216-4c4a-8422-2e3391a256b2-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-2fvvn\" (UID: \"0421da7c-2216-4c4a-8422-2e3391a256b2\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2fvvn"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.495475 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0421da7c-2216-4c4a-8422-2e3391a256b2-config\") pod \"kube-controller-manager-operator-78b949d7b-2fvvn\" (UID: \"0421da7c-2216-4c4a-8422-2e3391a256b2\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2fvvn"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.497213 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lr8jp\" (UniqueName: \"kubernetes.io/projected/f9f44648-d5f1-49ee-a394-115e43c97fc9-kube-api-access-lr8jp\") pod \"marketplace-operator-79b997595-nc87t\" (UID: \"f9f44648-d5f1-49ee-a394-115e43c97fc9\") " pod="openshift-marketplace/marketplace-operator-79b997595-nc87t"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.498562 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1d0ca542-fc9c-4849-ba80-c7fd15606fb5-images\") pod \"machine-config-operator-74547568cd-69w55\" (UID: \"1d0ca542-fc9c-4849-ba80-c7fd15606fb5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69w55"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.498933 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqxzs\" (UniqueName: \"kubernetes.io/projected/7102c26c-508b-485c-8a0e-8d35333641ce-kube-api-access-fqxzs\") pod \"machine-config-server-zq9dg\" (UID: \"7102c26c-508b-485c-8a0e-8d35333641ce\") " pod="openshift-machine-config-operator/machine-config-server-zq9dg"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.502510 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0ccd57b5-93f1-4bf8-adf6-4c0b2c400e03-cert\") pod \"ingress-canary-48665\" (UID: \"0ccd57b5-93f1-4bf8-adf6-4c0b2c400e03\") " pod="openshift-ingress-canary/ingress-canary-48665"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.502590 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a905c75c-cff5-4137-b8ee-212bfc0334ff-serving-cert\") pod \"console-operator-58897d9998-mp7c5\" (UID: \"a905c75c-cff5-4137-b8ee-212bfc0334ff\") " pod="openshift-console-operator/console-operator-58897d9998-mp7c5"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.518523 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-7t8bh"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.521417 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrffj\" (UniqueName: \"kubernetes.io/projected/4692b73d-a699-4ef5-82e8-cac30360e1b3-kube-api-access-xrffj\") pod \"apiserver-7bbb656c7d-d4nr2\" (UID: \"4692b73d-a699-4ef5-82e8-cac30360e1b3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.528284 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p86cs\" (UniqueName: \"kubernetes.io/projected/7acc3cd6-974f-4584-ac4d-d0e443052796-kube-api-access-p86cs\") pod \"kube-storage-version-migrator-operator-b67b599dd-htv8l\" (UID: \"7acc3cd6-974f-4584-ac4d-d0e443052796\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-htv8l"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.537208 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:00 crc kubenswrapper[4840]: E1205 15:01:00.537568 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:01.037552156 +0000 UTC m=+139.378614770 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.540658 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-zb8r7"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.548523 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n52nj\" (UniqueName: \"kubernetes.io/projected/96c3620f-2835-41cc-8152-40ff0eb6db8c-kube-api-access-n52nj\") pod \"authentication-operator-69f744f599-hvqs4\" (UID: \"96c3620f-2835-41cc-8152-40ff0eb6db8c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-hvqs4"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.561122 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-qskjn"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.566967 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2fvvn"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.568404 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6whxk\" (UniqueName: \"kubernetes.io/projected/71224ef1-9751-49f0-89d6-18b5225f97cb-kube-api-access-6whxk\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.581004 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7vcg4"]
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.588988 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.591737 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9kmw\" (UniqueName: \"kubernetes.io/projected/0382f04b-cd24-421f-8bad-f147dc41bf91-kube-api-access-f9kmw\") pod \"openshift-apiserver-operator-796bbdcf4f-j5lpd\" (UID: \"0382f04b-cd24-421f-8bad-f147dc41bf91\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j5lpd"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.600921 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nzmkn"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.610999 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h24l8\" (UniqueName: \"kubernetes.io/projected/da0816a7-7a6a-40ac-a63a-3c26278426d8-kube-api-access-h24l8\") pod \"openshift-controller-manager-operator-756b6f6bc6-zsj6v\" (UID: \"da0816a7-7a6a-40ac-a63a-3c26278426d8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zsj6v"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.626802 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-htv8l"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.629370 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zb4mz\" (UniqueName: \"kubernetes.io/projected/891a06ac-8101-4fab-a947-2adf9d8eeb7f-kube-api-access-zb4mz\") pod \"machine-api-operator-5694c8668f-9lj8m\" (UID: \"891a06ac-8101-4fab-a947-2adf9d8eeb7f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-9lj8m"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.638479 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 15:01:00 crc kubenswrapper[4840]: E1205 15:01:00.639440 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:01.139420447 +0000 UTC m=+139.480483061 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.644327 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fjcvg"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.649360 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvlpd\" (UniqueName: \"kubernetes.io/projected/ffde146e-eb04-4056-acb2-febc2da78e46-kube-api-access-kvlpd\") pod \"downloads-7954f5f757-rbbsb\" (UID: \"ffde146e-eb04-4056-acb2-febc2da78e46\") " pod="openshift-console/downloads-7954f5f757-rbbsb"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.682709 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nc87t"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.690580 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-svf7z"]
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.691336 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-9lj8m"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.698794 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-hvqs4"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.709802 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d8fa1328-3653-44b4-803e-e9c41249bc4f-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-589pg\" (UID: \"d8fa1328-3653-44b4-803e-e9c41249bc4f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-589pg"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.730674 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7f5s\" (UniqueName: \"kubernetes.io/projected/8a02be23-a9cd-4ea6-9b6f-9e383856cb1d-kube-api-access-q7f5s\") pod \"service-ca-9c57cc56f-r9h7q\" (UID: \"8a02be23-a9cd-4ea6-9b6f-9e383856cb1d\") " pod="openshift-service-ca/service-ca-9c57cc56f-r9h7q"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.731730 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-pczb2"]
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.737128 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j5lpd"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.740712 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:00 crc kubenswrapper[4840]: E1205 15:01:00.741126 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:01.241109793 +0000 UTC m=+139.582172417 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.744075 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-zq9dg"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.750995 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/71224ef1-9751-49f0-89d6-18b5225f97cb-bound-sa-token\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.757481 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-rbbsb"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.767672 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fdf64638-6ce1-424a-af26-a49e98a29582-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jh844\" (UID: \"fdf64638-6ce1-424a-af26-a49e98a29582\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jh844"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.779125 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-7t8bh"]
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.780154 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-zb8r7"]
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.782640 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.791983 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7r2gb\" (UniqueName: \"kubernetes.io/projected/284f860e-20fc-48b2-91f2-a3e6fcb6b2c7-kube-api-access-7r2gb\") pod \"packageserver-d55dfcdfc-tn7k5\" (UID: \"284f860e-20fc-48b2-91f2-a3e6fcb6b2c7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tn7k5"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.794160 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zsj6v"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.807152 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4bdzq" event={"ID":"87967d07-5c59-4d94-a9c4-1a9f1058e0a2","Type":"ContainerStarted","Data":"19c3446359716edc65d27b1e2ac25e892d894ea146b01f8b71dd8bbf06c1ec96"}
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.811184 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hr672" event={"ID":"7b21b2d6-8970-40a9-ad70-4ef9fed68b72","Type":"ContainerStarted","Data":"b65aee86fb3d94861d61fd398e0af48138c3529a0b4cf01aa3a8168ed42d09f6"}
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.813757 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9" event={"ID":"6e933683-f464-453c-8228-97b31d8b1f42","Type":"ContainerStarted","Data":"d55dd44445405799a32bb1f5b512dce4446cbd51a36278092e08f5c59aeee75d"}
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.836156 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4czl\" (UniqueName: \"kubernetes.io/projected/fdf64638-6ce1-424a-af26-a49e98a29582-kube-api-access-d4czl\") pod \"ingress-operator-5b745b69d9-jh844\" (UID: \"fdf64638-6ce1-424a-af26-a49e98a29582\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jh844"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.841986 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 15:01:00 crc kubenswrapper[4840]: E1205 15:01:00.842194 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:01.342171221 +0000 UTC m=+139.683233845 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
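[Editor's sketch] By this point the pattern is clear: the same PVC volume fails MountVolume.MountDevice for the incoming image-registry pod (UID 71224ef1-9751-49f0-89d6-18b5225f97cb) and UnmountVolume.TearDown for the departed pod 8f668bae-612b-4b75-9490-919e737c6a3b, both with the same root cause, kubevirt.io.hostpath-provisioner not yet registered. A throwaway scanner along these lines (a hypothetical helper written for this log's one-entry-per-line format; the kubelet.log filename is an assumption) tallies how often each volume hits that error:

// tallyfailures.go -- hypothetical helper for reading this kubelet.log:
// counts "not found in the list of registered CSI drivers" failures per volume.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
	"strings"
)

func main() {
	// Pulls the volume name out of entries like:
	//   Error: MountVolume.MountDevice failed for volume "pvc-..." ...
	volRe := regexp.MustCompile(`failed for volume "([^"]+)"`)
	counts := map[string]int{}

	f, err := os.Open("kubelet.log") // filename is an assumption
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	defer f.Close()

	sc := bufio.NewScanner(f)
	sc.Buffer(make([]byte, 0, 1<<20), 1<<20) // some entries are very long
	for sc.Scan() {
		line := sc.Text()
		if !strings.Contains(line, "not found in the list of registered CSI drivers") {
			continue
		}
		if m := volRe.FindStringSubmatch(line); m != nil {
			counts[m[1]]++
		}
	}
	for vol, n := range counts {
		fmt.Printf("%s\t%d failures\n", vol, n)
	}
}

For this excerpt it would report a single volume, pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8, accumulating one MountDevice and one TearDown failure roughly every 500ms.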
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.842437 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:00 crc kubenswrapper[4840]: E1205 15:01:00.842749 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:01.342734937 +0000 UTC m=+139.683797551 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.866221 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwxj4\" (UniqueName: \"kubernetes.io/projected/e281c913-e265-4ce8-af6a-11f255f6faf1-kube-api-access-bwxj4\") pod \"control-plane-machine-set-operator-78cbb6b69f-x52gm\" (UID: \"e281c913-e265-4ce8-af6a-11f255f6faf1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x52gm"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.885527 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sn5g5\" (UniqueName: \"kubernetes.io/projected/d87a8800-e3bd-4e0b-89c4-fe73193110fb-kube-api-access-sn5g5\") pod \"apiserver-76f77b778f-g59rm\" (UID: \"d87a8800-e3bd-4e0b-89c4-fe73193110fb\") " pod="openshift-apiserver/apiserver-76f77b778f-g59rm"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.886057 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9k58\" (UniqueName: \"kubernetes.io/projected/c1cd36b3-0ae5-4d77-a972-4521a0bed069-kube-api-access-s9k58\") pod \"package-server-manager-789f6589d5-8vzfs\" (UID: \"c1cd36b3-0ae5-4d77-a972-4521a0bed069\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8vzfs"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.887074 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7d7gn\" (UniqueName: \"kubernetes.io/projected/2b4c7f51-5133-456b-9923-61854cdcf098-kube-api-access-7d7gn\") pod \"migrator-59844c95c7-xrctg\" (UID: \"2b4c7f51-5133-456b-9923-61854cdcf098\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xrctg"
Dec 05 15:01:00 crc kubenswrapper[4840]: W1205 15:01:00.889049 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd6391e1f_a04c_44ae_9528_b80530aa1d4a.slice/crio-62cbaeb70b0f6766425dd5a980655921ad094fc5ab4582168f0b7b9e269d8538 WatchSource:0}: Error finding container 62cbaeb70b0f6766425dd5a980655921ad094fc5ab4582168f0b7b9e269d8538: Status 404 returned error can't find the container with id 62cbaeb70b0f6766425dd5a980655921ad094fc5ab4582168f0b7b9e269d8538
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.889940 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-589pg"
Dec 05 15:01:00 crc kubenswrapper[4840]: W1205 15:01:00.890216 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod811f6598_f603_4a15_8dec_add067d82d5c.slice/crio-22211d573c969e8c92188faeb74edac067d1c26f980ea7d6b6e3e262f1168451 WatchSource:0}: Error finding container 22211d573c969e8c92188faeb74edac067d1c26f980ea7d6b6e3e262f1168451: Status 404 returned error can't find the container with id 22211d573c969e8c92188faeb74edac067d1c26f980ea7d6b6e3e262f1168451
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.891311 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ts4s2\" (UniqueName: \"kubernetes.io/projected/1d0ca542-fc9c-4849-ba80-c7fd15606fb5-kube-api-access-ts4s2\") pod \"machine-config-operator-74547568cd-69w55\" (UID: \"1d0ca542-fc9c-4849-ba80-c7fd15606fb5\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69w55"
Dec 05 15:01:00 crc kubenswrapper[4840]: W1205 15:01:00.906777 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod237e4a75_4edd_4622_87f4_03a1f620649d.slice/crio-637d9618b67c68fc28c55bfc90c803a58fd5f243d6a84b867bce2a28d2f5d178 WatchSource:0}: Error finding container 637d9618b67c68fc28c55bfc90c803a58fd5f243d6a84b867bce2a28d2f5d178: Status 404 returned error can't find the container with id 637d9618b67c68fc28c55bfc90c803a58fd5f243d6a84b867bce2a28d2f5d178
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.907344 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jh844"
Dec 05 15:01:00 crc kubenswrapper[4840]: W1205 15:01:00.907771 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7c949308_6fa4_47cf_9275_b4ddcdcbb30a.slice/crio-44c8779241357345fdb01994e04250fd34d695cd5523d76c96c67a7d612fcf62 WatchSource:0}: Error finding container 44c8779241357345fdb01994e04250fd34d695cd5523d76c96c67a7d612fcf62: Status 404 returned error can't find the container with id 44c8779241357345fdb01994e04250fd34d695cd5523d76c96c67a7d612fcf62
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.908886 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2jxq\" (UniqueName: \"kubernetes.io/projected/a905c75c-cff5-4137-b8ee-212bfc0334ff-kube-api-access-v2jxq\") pod \"console-operator-58897d9998-mp7c5\" (UID: \"a905c75c-cff5-4137-b8ee-212bfc0334ff\") " pod="openshift-console-operator/console-operator-58897d9998-mp7c5"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.915926 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-mp7c5"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.930291 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6xgx\" (UniqueName: \"kubernetes.io/projected/70be4dce-113d-4659-b5dc-55c3c724de12-kube-api-access-w6xgx\") pod \"collect-profiles-29415780-k44mq\" (UID: \"70be4dce-113d-4659-b5dc-55c3c724de12\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415780-k44mq"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.935212 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69w55"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.949296 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 15:01:00 crc kubenswrapper[4840]: E1205 15:01:00.949713 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:01.449693082 +0000 UTC m=+139.790755696 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.950197 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l75sx\" (UniqueName: \"kubernetes.io/projected/478697e8-c090-46b8-adc9-88d2592e75a8-kube-api-access-l75sx\") pod \"multus-admission-controller-857f4d67dd-7z87w\" (UID: \"478697e8-c090-46b8-adc9-88d2592e75a8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7z87w"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.953076 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8vzfs"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.959972 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-g59rm"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.961750 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-7z87w"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.977713 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415780-k44mq"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.983817 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qpmkb\" (UniqueName: \"kubernetes.io/projected/0ccd57b5-93f1-4bf8-adf6-4c0b2c400e03-kube-api-access-qpmkb\") pod \"ingress-canary-48665\" (UID: \"0ccd57b5-93f1-4bf8-adf6-4c0b2c400e03\") " pod="openshift-ingress-canary/ingress-canary-48665"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.986469 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-r9h7q"
Dec 05 15:01:00 crc kubenswrapper[4840]: I1205 15:01:00.996359 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99lnh\" (UniqueName: \"kubernetes.io/projected/f1e52a75-bace-452e-989d-3907666cdd11-kube-api-access-99lnh\") pod \"olm-operator-6b444d44fb-7bdp5\" (UID: \"f1e52a75-bace-452e-989d-3907666cdd11\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7bdp5"
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.001897 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7bdp5"
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.009158 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tn7k5"
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.009308 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hw5zt\" (UniqueName: \"kubernetes.io/projected/cf41adb4-ca77-4997-a2db-0e45bbe317c3-kube-api-access-hw5zt\") pod \"router-default-5444994796-qrgwq\" (UID: \"cf41adb4-ca77-4997-a2db-0e45bbe317c3\") " pod="openshift-ingress/router-default-5444994796-qrgwq"
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.017303 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x52gm"
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.031365 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-48665"
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.037570 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ld225\" (UniqueName: \"kubernetes.io/projected/e68d4dc9-2488-4666-897a-f2ce27934d81-kube-api-access-ld225\") pod \"csi-hostpathplugin-fnhqw\" (UID: \"e68d4dc9-2488-4666-897a-f2ce27934d81\") " pod="hostpath-provisioner/csi-hostpathplugin-fnhqw"
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.050632 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:01 crc kubenswrapper[4840]: E1205 15:01:01.051098 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:01.551080639 +0000 UTC m=+139.892143253 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.056125 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-658zl\" (UniqueName: \"kubernetes.io/projected/c25af517-aa72-4f6b-9271-e936daa641d7-kube-api-access-658zl\") pod \"dns-default-5t5n2\" (UID: \"c25af517-aa72-4f6b-9271-e936daa641d7\") " pod="openshift-dns/dns-default-5t5n2"
Dec 05 15:01:01 crc kubenswrapper[4840]: W1205 15:01:01.062849 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7102c26c_508b_485c_8a0e_8d35333641ce.slice/crio-24bada03a54c50edf05408fa225834e88470a9876a2f3a08588149f0bd9665dd WatchSource:0}: Error finding container 24bada03a54c50edf05408fa225834e88470a9876a2f3a08588149f0bd9665dd: Status 404 returned error can't find the container with id 24bada03a54c50edf05408fa225834e88470a9876a2f3a08588149f0bd9665dd
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.064723 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-fnhqw"
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.068391 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gskd\" (UniqueName: \"kubernetes.io/projected/8cbd5e8f-ec51-4c95-a2e0-43b06373d323-kube-api-access-2gskd\") pod \"service-ca-operator-777779d784-pw7gj\" (UID: \"8cbd5e8f-ec51-4c95-a2e0-43b06373d323\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-pw7gj"
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.069760 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-5t5n2"
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.141068 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv"]
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.151589 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 15:01:01 crc kubenswrapper[4840]: E1205 15:01:01.151687 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:01.651656814 +0000 UTC m=+139.992719428 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
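[Editor's sketch] The blocking dependency is visible in the log itself: the pod that would register the driver, hostpath-provisioner/csi-hostpathplugin-fnhqw, has its registration-dir and socket-dir volumes mounted but at 15:01:01.064723 is still waiting for its sandbox, so nothing has yet registered kubevirt.io.hostpath-provisioner with the kubelet. Registration normally happens when the driver's node-driver-registrar drops a unix socket into the kubelet plugin registration directory; a spot-check of that directory might look like the sketch below (the /var/lib/kubelet/plugins_registry path and the -reg.sock naming are assumptions based on common kubelet and registrar defaults):

// listplugins.go -- hypothetical spot-check: list sockets in the kubelet
// plugin registration directory to see which CSI drivers have registered.
package main

import (
	"fmt"
	"os"
	"strings"
)

func main() {
	dir := "/var/lib/kubelet/plugins_registry" // assumed default registration dir
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	for _, e := range entries {
		// A registered plugin typically appears as <driver-name>-reg.sock,
		// e.g. kubevirt.io.hostpath-provisioner-reg.sock once the driver pod is up.
		if strings.HasSuffix(e.Name(), ".sock") {
			fmt.Println(e.Name())
		}
	}
}

From the API side, kubectl get csinode crc -o jsonpath='{.spec.drivers[*].name}' lists the drivers the node has registered; the retries above stop on their own once the driver appears there.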
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.151889 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:01 crc kubenswrapper[4840]: E1205 15:01:01.152180 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:01.652161748 +0000 UTC m=+139.993224362 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.175703 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xrctg"
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.179190 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-htv8l"]
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.182993 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-qrgwq"
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.254485 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 15:01:01 crc kubenswrapper[4840]: E1205 15:01:01.254640 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:01.754607165 +0000 UTC m=+140.095669789 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.254781 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:01 crc kubenswrapper[4840]: E1205 15:01:01.255120 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:01.755110189 +0000 UTC m=+140.096172803 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.279181 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-qskjn"]
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.286610 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2fvvn"]
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.323727 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-pw7gj"
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.355534 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 15:01:01 crc kubenswrapper[4840]: E1205 15:01:01.355648 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:01.855626882 +0000 UTC m=+140.196689496 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:01 crc kubenswrapper[4840]: W1205 15:01:01.415484 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6a1bc4cd_5821_4b8a_a1d4_c3d0a1cf57f6.slice/crio-495e9d331ad54bd3b6771eb91800429cc75360d347d2ab28f56077afcf74bada WatchSource:0}: Error finding container 495e9d331ad54bd3b6771eb91800429cc75360d347d2ab28f56077afcf74bada: Status 404 returned error can't find the container with id 495e9d331ad54bd3b6771eb91800429cc75360d347d2ab28f56077afcf74bada
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.416535 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-rbbsb"]
Dec 05 15:01:01 crc kubenswrapper[4840]: W1205 15:01:01.417157 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7acc3cd6_974f_4584_ac4d_d0e443052796.slice/crio-1e20b46dc8bcab12e60926439d9c4627815de456f461f19c69d7c9f5703ff54e WatchSource:0}: Error finding container 1e20b46dc8bcab12e60926439d9c4627815de456f461f19c69d7c9f5703ff54e: Status 404 returned error can't find the container with id 1e20b46dc8bcab12e60926439d9c4627815de456f461f19c69d7c9f5703ff54e
Dec 05 15:01:01 crc kubenswrapper[4840]: W1205 15:01:01.420970 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod362d5a3b_2574_499c_8142_0f3e4369f573.slice/crio-752c5e33a2935a297cb1518482802008ec98fbb317b1b8fd2fe8de56fb4f8a39 WatchSource:0}: Error finding container 752c5e33a2935a297cb1518482802008ec98fbb317b1b8fd2fe8de56fb4f8a39: Status 404 returned error can't find the container with id 752c5e33a2935a297cb1518482802008ec98fbb317b1b8fd2fe8de56fb4f8a39
Dec 05 15:01:01 crc kubenswrapper[4840]: I1205
15:01:01.456746 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:01 crc kubenswrapper[4840]: E1205 15:01:01.457140 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:01.957110702 +0000 UTC m=+140.298173316 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.457410 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:01 crc kubenswrapper[4840]: E1205 15:01:01.457791 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:01.957755561 +0000 UTC m=+140.298818175 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.558436 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:01 crc kubenswrapper[4840]: E1205 15:01:01.558747 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:02.058732556 +0000 UTC m=+140.399795170 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.662942 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:01 crc kubenswrapper[4840]: E1205 15:01:01.663307 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:02.163294613 +0000 UTC m=+140.504357227 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.731891 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jh844"] Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.764339 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:01 crc kubenswrapper[4840]: E1205 15:01:01.764784 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:02.264765183 +0000 UTC m=+140.605827797 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.847075 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tqkgt" event={"ID":"80ec2d65-850c-4134-b5cd-f763de4964fb","Type":"ContainerStarted","Data":"24e94159b3dfe0e54e92d6f0fad53f5209e9d4fa088cfa8bee2552b074298888"} Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.848632 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv" event={"ID":"6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6","Type":"ContainerStarted","Data":"495e9d331ad54bd3b6771eb91800429cc75360d347d2ab28f56077afcf74bada"} Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.849929 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-htv8l" event={"ID":"7acc3cd6-974f-4584-ac4d-d0e443052796","Type":"ContainerStarted","Data":"1e20b46dc8bcab12e60926439d9c4627815de456f461f19c69d7c9f5703ff54e"} Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.851633 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" event={"ID":"811f6598-f603-4a15-8dec-add067d82d5c","Type":"ContainerStarted","Data":"22211d573c969e8c92188faeb74edac067d1c26f980ea7d6b6e3e262f1168451"} Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.865625 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:01 crc kubenswrapper[4840]: E1205 15:01:01.866000 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:02.365986226 +0000 UTC m=+140.707048840 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.867630 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-zb8r7" event={"ID":"7c949308-6fa4-47cf-9275-b4ddcdcbb30a","Type":"ContainerStarted","Data":"44c8779241357345fdb01994e04250fd34d695cd5523d76c96c67a7d612fcf62"} Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.871802 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-7t8bh" event={"ID":"a612c2a1-ac77-40f9-8cd9-18cf9f68b963","Type":"ContainerStarted","Data":"eec7e89988d2cad1bee1c1514f185568d9a9fc6483327a0ff6158bdbe805df22"} Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.873533 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-rbbsb" event={"ID":"ffde146e-eb04-4056-acb2-febc2da78e46","Type":"ContainerStarted","Data":"8dd351254fb57d5ca0709cff67b5bb2a891791b4c431a696b072eae52b70ec16"} Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.876090 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hr672" event={"ID":"7b21b2d6-8970-40a9-ad70-4ef9fed68b72","Type":"ContainerStarted","Data":"0774545fab7f853b82635b283d03add46f328d1a8b9ef950288cab20ea9cb6db"} Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.878914 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-zq9dg" event={"ID":"7102c26c-508b-485c-8a0e-8d35333641ce","Type":"ContainerStarted","Data":"24bada03a54c50edf05408fa225834e88470a9876a2f3a08588149f0bd9665dd"} Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.880561 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-qskjn" event={"ID":"362d5a3b-2574-499c-8142-0f3e4369f573","Type":"ContainerStarted","Data":"752c5e33a2935a297cb1518482802008ec98fbb317b1b8fd2fe8de56fb4f8a39"} Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.881330 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pczb2" event={"ID":"237e4a75-4edd-4622-87f4-03a1f620649d","Type":"ContainerStarted","Data":"637d9618b67c68fc28c55bfc90c803a58fd5f243d6a84b867bce2a28d2f5d178"} Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.882782 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4bdzq" event={"ID":"87967d07-5c59-4d94-a9c4-1a9f1058e0a2","Type":"ContainerStarted","Data":"65cfa57ec26596c3d0d7030bd781d80b5f4e768a3cc5475d86585546a827891f"} Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.883752 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7vcg4" event={"ID":"d6391e1f-a04c-44ae-9528-b80530aa1d4a","Type":"ContainerStarted","Data":"62cbaeb70b0f6766425dd5a980655921ad094fc5ab4582168f0b7b9e269d8538"} Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.885728 4840 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2fvvn" event={"ID":"0421da7c-2216-4c4a-8422-2e3391a256b2","Type":"ContainerStarted","Data":"087c1b8e16577c22ff10fb8d297e595b7c127c7a1a307657446d854d9cc0b716"} Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.885749 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9" Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.888373 4840 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-8t4w9 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.888420 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9" podUID="6e933683-f464-453c-8228-97b31d8b1f42" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Dec 05 15:01:01 crc kubenswrapper[4840]: I1205 15:01:01.977410 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:01 crc kubenswrapper[4840]: E1205 15:01:01.978390 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:02.478375715 +0000 UTC m=+140.819438319 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.042369 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j5lpd"] Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.079040 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:02 crc kubenswrapper[4840]: E1205 15:01:02.079457 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:02.579442803 +0000 UTC m=+140.920505417 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.082159 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-9lj8m"] Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.094666 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nc87t"] Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.131303 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-hvqs4"] Dec 05 15:01:02 crc kubenswrapper[4840]: W1205 15:01:02.146850 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0382f04b_cd24_421f_8bad_f147dc41bf91.slice/crio-9f463979d329e95b69ff9bed311ab1a003aa7ef93a9fb0275aa1b86319e67125 WatchSource:0}: Error finding container 9f463979d329e95b69ff9bed311ab1a003aa7ef93a9fb0275aa1b86319e67125: Status 404 returned error can't find the container with id 9f463979d329e95b69ff9bed311ab1a003aa7ef93a9fb0275aa1b86319e67125 Dec 05 15:01:02 crc kubenswrapper[4840]: W1205 15:01:02.156373 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcf41adb4_ca77_4997_a2db_0e45bbe317c3.slice/crio-62cda13cd8188fcf4c2847e31895fd482eb9fcc7fcdf59be991d02ab3e0f5b4f WatchSource:0}: Error finding container 62cda13cd8188fcf4c2847e31895fd482eb9fcc7fcdf59be991d02ab3e0f5b4f: Status 404 returned error can't find the container with id 62cda13cd8188fcf4c2847e31895fd482eb9fcc7fcdf59be991d02ab3e0f5b4f Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.200703 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:02 crc kubenswrapper[4840]: E1205 15:01:02.200823 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:02.700806895 +0000 UTC m=+141.041869509 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.201436 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:02 crc kubenswrapper[4840]: E1205 15:01:02.201998 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:02.701978648 +0000 UTC m=+141.043041272 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.216589 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9" podStartSLOduration=121.216569491 podStartE2EDuration="2m1.216569491s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:02.209662376 +0000 UTC m=+140.550724990" watchObservedRunningTime="2025-12-05 15:01:02.216569491 +0000 UTC m=+140.557632125" Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.232015 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-hr672" podStartSLOduration=121.231995427 podStartE2EDuration="2m1.231995427s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:02.224826615 +0000 UTC m=+140.565889229" watchObservedRunningTime="2025-12-05 15:01:02.231995427 +0000 UTC m=+140.573058041" Dec 05 15:01:02 crc kubenswrapper[4840]: W1205 15:01:02.247508 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod891a06ac_8101_4fab_a947_2adf9d8eeb7f.slice/crio-99deeb7e93cadeea4bcb6943e2246cd8df9efaf8c2c9683d876a584bd4ad401e WatchSource:0}: Error finding container 99deeb7e93cadeea4bcb6943e2246cd8df9efaf8c2c9683d876a584bd4ad401e: Status 404 returned error can't find the container with id 99deeb7e93cadeea4bcb6943e2246cd8df9efaf8c2c9683d876a584bd4ad401e Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.317399 4840 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:02 crc kubenswrapper[4840]: E1205 15:01:02.317858 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:02.817834895 +0000 UTC m=+141.158897529 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.318509 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-fjcvg"] Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.419437 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:02 crc kubenswrapper[4840]: E1205 15:01:02.420014 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:02.919996194 +0000 UTC m=+141.261058808 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.432939 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nzmkn"] Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.434922 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zsj6v"] Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.471404 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-xrctg"] Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.475207 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-589pg"] Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.486565 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-fnhqw"] Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.492931 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8vzfs"] Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.520019 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:02 crc kubenswrapper[4840]: E1205 15:01:02.520249 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:03.020228769 +0000 UTC m=+141.361291373 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.523168 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:02 crc kubenswrapper[4840]: E1205 15:01:02.523703 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:03.023692357 +0000 UTC m=+141.364754971 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.556833 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-mp7c5"] Dec 05 15:01:02 crc kubenswrapper[4840]: W1205 15:01:02.578810 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b4c7f51_5133_456b_9923_61854cdcf098.slice/crio-614a9a5d53eec83531e5b780b47654757687d21c89749cc51fcce48a8a04457c WatchSource:0}: Error finding container 614a9a5d53eec83531e5b780b47654757687d21c89749cc51fcce48a8a04457c: Status 404 returned error can't find the container with id 614a9a5d53eec83531e5b780b47654757687d21c89749cc51fcce48a8a04457c Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.579323 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-69w55"] Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.584216 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2"] Dec 05 15:01:02 crc kubenswrapper[4840]: W1205 15:01:02.614674 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd8fa1328_3653_44b4_803e_e9c41249bc4f.slice/crio-b40037ab2357196571112056f62b86d2983c4dfa2e46d8eeb764e29f88023358 WatchSource:0}: Error finding container b40037ab2357196571112056f62b86d2983c4dfa2e46d8eeb764e29f88023358: Status 404 returned error can't find the container with id b40037ab2357196571112056f62b86d2983c4dfa2e46d8eeb764e29f88023358 Dec 05 15:01:02 crc kubenswrapper[4840]: W1205 15:01:02.617313 4840 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1d0ca542_fc9c_4849_ba80_c7fd15606fb5.slice/crio-8704a1e88e6a35cd2e15e53571d026fd4344911556b223f21d2263a0dc0a50ef WatchSource:0}: Error finding container 8704a1e88e6a35cd2e15e53571d026fd4344911556b223f21d2263a0dc0a50ef: Status 404 returned error can't find the container with id 8704a1e88e6a35cd2e15e53571d026fd4344911556b223f21d2263a0dc0a50ef Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.623767 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:02 crc kubenswrapper[4840]: E1205 15:01:02.624047 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:03.123982393 +0000 UTC m=+141.465045037 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.624138 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:02 crc kubenswrapper[4840]: E1205 15:01:02.624411 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:03.124400325 +0000 UTC m=+141.465462939 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:02 crc kubenswrapper[4840]: W1205 15:01:02.628089 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda0816a7_7a6a_40ac_a63a_3c26278426d8.slice/crio-9ca671da986d90d2479ac8e8e466e94335f385c40cd11326764b8829df6e06a4 WatchSource:0}: Error finding container 9ca671da986d90d2479ac8e8e466e94335f385c40cd11326764b8829df6e06a4: Status 404 returned error can't find the container with id 9ca671da986d90d2479ac8e8e466e94335f385c40cd11326764b8829df6e06a4 Dec 05 15:01:02 crc kubenswrapper[4840]: W1205 15:01:02.634358 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4692b73d_a699_4ef5_82e8_cac30360e1b3.slice/crio-6d81a0540dc67f4867325edc27084bbb7052fad823915d410ae0e8b50b00212c WatchSource:0}: Error finding container 6d81a0540dc67f4867325edc27084bbb7052fad823915d410ae0e8b50b00212c: Status 404 returned error can't find the container with id 6d81a0540dc67f4867325edc27084bbb7052fad823915d410ae0e8b50b00212c Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.728335 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:02 crc kubenswrapper[4840]: E1205 15:01:02.728696 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:03.228677554 +0000 UTC m=+141.569740168 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.757210 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-r9h7q"] Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.761055 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7bdp5"] Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.777977 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tn7k5"] Dec 05 15:01:02 crc kubenswrapper[4840]: W1205 15:01:02.778156 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1e52a75_bace_452e_989d_3907666cdd11.slice/crio-4350b19584b2aab5258769a02c16dafd92a8154acbc9a1187fdfe70252a6a92e WatchSource:0}: Error finding container 4350b19584b2aab5258769a02c16dafd92a8154acbc9a1187fdfe70252a6a92e: Status 404 returned error can't find the container with id 4350b19584b2aab5258769a02c16dafd92a8154acbc9a1187fdfe70252a6a92e Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.785826 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-7z87w"] Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.801656 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415780-k44mq"] Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.824111 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x52gm"] Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.829624 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:02 crc kubenswrapper[4840]: E1205 15:01:02.829937 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:03.329921288 +0000 UTC m=+141.670983902 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.896615 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7vcg4" event={"ID":"d6391e1f-a04c-44ae-9528-b80530aa1d4a","Type":"ContainerStarted","Data":"6410fa4a42e7a97d5ba10a99ff1a698dfc399b63dd7fbe7e583404d280d249a2"} Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.898306 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fjcvg" event={"ID":"3d9897dc-10eb-4947-b5ce-63362338dfd1","Type":"ContainerStarted","Data":"969dfeb4c6ae8b7c7f7876f1fcaed4c38f26c2738839a26a486173b6921bd7cd"} Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.899780 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-mp7c5" event={"ID":"a905c75c-cff5-4137-b8ee-212bfc0334ff","Type":"ContainerStarted","Data":"98a306529a3c37264621a55f212a1533c9aff448f01594e868b04759bbf780c6"} Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.900919 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-9lj8m" event={"ID":"891a06ac-8101-4fab-a947-2adf9d8eeb7f","Type":"ContainerStarted","Data":"99deeb7e93cadeea4bcb6943e2246cd8df9efaf8c2c9683d876a584bd4ad401e"} Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.901619 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jh844" event={"ID":"fdf64638-6ce1-424a-af26-a49e98a29582","Type":"ContainerStarted","Data":"b5f2b32a378dc1809b81a023881fcdb89519e745a48b72889f4997ec968bc3b1"} Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.902719 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nzmkn" event={"ID":"3a5f30d9-fc2d-47dc-8662-d649023f9521","Type":"ContainerStarted","Data":"e213d24a268d8975667627661bd97c76cf8253a79cd934c44c648266fc470678"} Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.903809 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" event={"ID":"811f6598-f603-4a15-8dec-add067d82d5c","Type":"ContainerStarted","Data":"c3316cb2c121789e4cbf93384a78074f73f1c4e84d8e13cc2ec0dbb5ba22f90e"} Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.904852 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2" event={"ID":"4692b73d-a699-4ef5-82e8-cac30360e1b3","Type":"ContainerStarted","Data":"6d81a0540dc67f4867325edc27084bbb7052fad823915d410ae0e8b50b00212c"} Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.905965 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69w55" event={"ID":"1d0ca542-fc9c-4849-ba80-c7fd15606fb5","Type":"ContainerStarted","Data":"8704a1e88e6a35cd2e15e53571d026fd4344911556b223f21d2263a0dc0a50ef"} Dec 05 
15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.908074 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-zb8r7" event={"ID":"7c949308-6fa4-47cf-9275-b4ddcdcbb30a","Type":"ContainerStarted","Data":"4666de25dc4adef932fa1e7b33810e1e79aa61433c87ea0bcd100221134d3e00"} Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.909017 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-r9h7q" event={"ID":"8a02be23-a9cd-4ea6-9b6f-9e383856cb1d","Type":"ContainerStarted","Data":"1f884271353133aa18fa9b65def15b96dd1bb3471d53a19bea9c893e5945c1fb"} Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.909923 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nc87t" event={"ID":"f9f44648-d5f1-49ee-a394-115e43c97fc9","Type":"ContainerStarted","Data":"a29d3bf0bd2b782009ec10ec573e33af37c3733cd9fd5454f7fa6c1ac84127b9"} Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.911651 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zsj6v" event={"ID":"da0816a7-7a6a-40ac-a63a-3c26278426d8","Type":"ContainerStarted","Data":"9ca671da986d90d2479ac8e8e466e94335f385c40cd11326764b8829df6e06a4"} Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.913577 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-589pg" event={"ID":"d8fa1328-3653-44b4-803e-e9c41249bc4f","Type":"ContainerStarted","Data":"b40037ab2357196571112056f62b86d2983c4dfa2e46d8eeb764e29f88023358"} Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.914407 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j5lpd" event={"ID":"0382f04b-cd24-421f-8bad-f147dc41bf91","Type":"ContainerStarted","Data":"9f463979d329e95b69ff9bed311ab1a003aa7ef93a9fb0275aa1b86319e67125"} Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.915285 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-fnhqw" event={"ID":"e68d4dc9-2488-4666-897a-f2ce27934d81","Type":"ContainerStarted","Data":"80c02c1fe7d9a5fc5fd9d832b1cf6cc5b6b4e77e30c36d456ed729056ca3bd92"} Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.916569 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-7t8bh" event={"ID":"a612c2a1-ac77-40f9-8cd9-18cf9f68b963","Type":"ContainerStarted","Data":"1156def8ddb055f531a624e521182707628bec18a5a4a6076654cbcef2ae9fdc"} Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.917335 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7bdp5" event={"ID":"f1e52a75-bace-452e-989d-3907666cdd11","Type":"ContainerStarted","Data":"4350b19584b2aab5258769a02c16dafd92a8154acbc9a1187fdfe70252a6a92e"} Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.918051 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-hvqs4" event={"ID":"96c3620f-2835-41cc-8152-40ff0eb6db8c","Type":"ContainerStarted","Data":"c8ba5de104bc67cef653a109cd65e44ebae023099ce728b400af56681b2a9ecb"} Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.919170 4840 generic.go:334] "Generic (PLEG): container finished" podID="237e4a75-4edd-4622-87f4-03a1f620649d" 
containerID="d601e2cd93a98d1fef7f4c0c24acfd541fb1f5a2b148caa8046a2f3c7716d984" exitCode=0 Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.919272 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pczb2" event={"ID":"237e4a75-4edd-4622-87f4-03a1f620649d","Type":"ContainerDied","Data":"d601e2cd93a98d1fef7f4c0c24acfd541fb1f5a2b148caa8046a2f3c7716d984"} Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.920091 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-qrgwq" event={"ID":"cf41adb4-ca77-4997-a2db-0e45bbe317c3","Type":"ContainerStarted","Data":"62cda13cd8188fcf4c2847e31895fd482eb9fcc7fcdf59be991d02ab3e0f5b4f"} Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.920914 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xrctg" event={"ID":"2b4c7f51-5133-456b-9923-61854cdcf098","Type":"ContainerStarted","Data":"614a9a5d53eec83531e5b780b47654757687d21c89749cc51fcce48a8a04457c"} Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.922738 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8vzfs" event={"ID":"c1cd36b3-0ae5-4d77-a972-4521a0bed069","Type":"ContainerStarted","Data":"9452814dff6a3abb516223fd4591fd690563e7a8e0eb2cd23db7dfdc2c49ba2b"} Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.924332 4840 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-8t4w9 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.924443 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9" podUID="6e933683-f464-453c-8228-97b31d8b1f42" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.930454 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:02 crc kubenswrapper[4840]: E1205 15:01:02.931361 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:03.431341876 +0000 UTC m=+141.772404490 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:02 crc kubenswrapper[4840]: W1205 15:01:02.941977 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode281c913_e265_4ce8_af6a_11f255f6faf1.slice/crio-3993719440df688cf5c89028aa878da2143d393a79a48e3f842caaadf9d56bc6 WatchSource:0}: Error finding container 3993719440df688cf5c89028aa878da2143d393a79a48e3f842caaadf9d56bc6: Status 404 returned error can't find the container with id 3993719440df688cf5c89028aa878da2143d393a79a48e3f842caaadf9d56bc6 Dec 05 15:01:02 crc kubenswrapper[4840]: W1205 15:01:02.946802 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod70be4dce_113d_4659_b5dc_55c3c724de12.slice/crio-896e6270a2a65b9735791a0fc1d2edf53a78a2f285f07fcda6835f461cfb3631 WatchSource:0}: Error finding container 896e6270a2a65b9735791a0fc1d2edf53a78a2f285f07fcda6835f461cfb3631: Status 404 returned error can't find the container with id 896e6270a2a65b9735791a0fc1d2edf53a78a2f285f07fcda6835f461cfb3631 Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.947058 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-48665"] Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.953477 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-pw7gj"] Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.955703 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-5t5n2"] Dec 05 15:01:02 crc kubenswrapper[4840]: I1205 15:01:02.959143 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-g59rm"] Dec 05 15:01:02 crc kubenswrapper[4840]: W1205 15:01:02.982324 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8cbd5e8f_ec51_4c95_a2e0_43b06373d323.slice/crio-0894132e19e87c035453cc39028ef5cb5599aa2a0115ed252520bf7852f08cb9 WatchSource:0}: Error finding container 0894132e19e87c035453cc39028ef5cb5599aa2a0115ed252520bf7852f08cb9: Status 404 returned error can't find the container with id 0894132e19e87c035453cc39028ef5cb5599aa2a0115ed252520bf7852f08cb9 Dec 05 15:01:02 crc kubenswrapper[4840]: W1205 15:01:02.992975 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc25af517_aa72_4f6b_9271_e936daa641d7.slice/crio-6f10090122d1ad38b3759d00c753f3b26a4b51029b38069e86594836fd0053e9 WatchSource:0}: Error finding container 6f10090122d1ad38b3759d00c753f3b26a4b51029b38069e86594836fd0053e9: Status 404 returned error can't find the container with id 6f10090122d1ad38b3759d00c753f3b26a4b51029b38069e86594836fd0053e9 Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.032392 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:03 crc kubenswrapper[4840]: E1205 15:01:03.032977 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:03.53294506 +0000 UTC m=+141.874007674 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.136771 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:03 crc kubenswrapper[4840]: E1205 15:01:03.136893 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:03.636876419 +0000 UTC m=+141.977939033 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.137180 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:03 crc kubenswrapper[4840]: E1205 15:01:03.137509 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:03.637501317 +0000 UTC m=+141.978563931 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.239030 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:03 crc kubenswrapper[4840]: E1205 15:01:03.239289 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:03.739259614 +0000 UTC m=+142.080322228 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.239563 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:03 crc kubenswrapper[4840]: E1205 15:01:03.239893 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:03.739882062 +0000 UTC m=+142.080944676 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.341121 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:03 crc kubenswrapper[4840]: E1205 15:01:03.341324 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:03.841267509 +0000 UTC m=+142.182330133 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.341441 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:03 crc kubenswrapper[4840]: E1205 15:01:03.342264 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:03.842248497 +0000 UTC m=+142.183311111 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.442376 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:03 crc kubenswrapper[4840]: E1205 15:01:03.442899 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:03.942860522 +0000 UTC m=+142.283923136 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.544583 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:03 crc kubenswrapper[4840]: E1205 15:01:03.545033 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:04.045013962 +0000 UTC m=+142.386076626 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.645509 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:03 crc kubenswrapper[4840]: E1205 15:01:03.645687 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:04.145661568 +0000 UTC m=+142.486724182 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.646041 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:03 crc kubenswrapper[4840]: E1205 15:01:03.646343 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:04.146335627 +0000 UTC m=+142.487398231 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.747388 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:03 crc kubenswrapper[4840]: E1205 15:01:03.747780 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:04.247726844 +0000 UTC m=+142.588789458 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.747962 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:03 crc kubenswrapper[4840]: E1205 15:01:03.748281 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:04.24826338 +0000 UTC m=+142.589325994 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.848797 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:03 crc kubenswrapper[4840]: E1205 15:01:03.848984 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:04.348961498 +0000 UTC m=+142.690024112 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.849058 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:03 crc kubenswrapper[4840]: E1205 15:01:03.849352 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:04.349333258 +0000 UTC m=+142.690395872 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.928740 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-48665" event={"ID":"0ccd57b5-93f1-4bf8-adf6-4c0b2c400e03","Type":"ContainerStarted","Data":"9eb4019fe26dee506d312452e51c1b2f985882d936434ca22f47ecf513e21332"} Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.930725 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv" event={"ID":"6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6","Type":"ContainerStarted","Data":"9cef40518ee075e6cb6da9e6b06220abfe6e6a4faa8b12bce2460a5488c390ac"} Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.931731 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415780-k44mq" event={"ID":"70be4dce-113d-4659-b5dc-55c3c724de12","Type":"ContainerStarted","Data":"896e6270a2a65b9735791a0fc1d2edf53a78a2f285f07fcda6835f461cfb3631"} Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.932657 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-5t5n2" event={"ID":"c25af517-aa72-4f6b-9271-e936daa641d7","Type":"ContainerStarted","Data":"6f10090122d1ad38b3759d00c753f3b26a4b51029b38069e86594836fd0053e9"} Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.933459 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-pw7gj" event={"ID":"8cbd5e8f-ec51-4c95-a2e0-43b06373d323","Type":"ContainerStarted","Data":"0894132e19e87c035453cc39028ef5cb5599aa2a0115ed252520bf7852f08cb9"} Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.934419 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-7z87w" event={"ID":"478697e8-c090-46b8-adc9-88d2592e75a8","Type":"ContainerStarted","Data":"e0e1a11890d19f4acec98f259f8b18ad121bcfe0b2485ea95ce90a84da13a1d3"} Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.935348 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tn7k5" event={"ID":"284f860e-20fc-48b2-91f2-a3e6fcb6b2c7","Type":"ContainerStarted","Data":"a40caf079625a21da8a50a3415e321d5d03bc990aa2c58fdba37ded00604bd0f"} Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.936228 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x52gm" event={"ID":"e281c913-e265-4ce8-af6a-11f255f6faf1","Type":"ContainerStarted","Data":"3993719440df688cf5c89028aa878da2143d393a79a48e3f842caaadf9d56bc6"} Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.937577 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-htv8l" event={"ID":"7acc3cd6-974f-4584-ac4d-d0e443052796","Type":"ContainerStarted","Data":"a6a566806b9466dd0837918ead07e04f09d0260ec853892e4c9e59eb003a8a5b"} Dec 05 15:01:03 
crc kubenswrapper[4840]: I1205 15:01:03.938580 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-g59rm" event={"ID":"d87a8800-e3bd-4e0b-89c4-fe73193110fb","Type":"ContainerStarted","Data":"3338b95c4c49b5860bec87c723ddba663087475415b441d756551a2c33a6862b"} Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.940037 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tqkgt" event={"ID":"80ec2d65-850c-4134-b5cd-f763de4964fb","Type":"ContainerStarted","Data":"672d615aa856016d3e0c18569c69b1c3d8475a4d1c7db23da7db11ebbca6221c"} Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.941462 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jh844" event={"ID":"fdf64638-6ce1-424a-af26-a49e98a29582","Type":"ContainerStarted","Data":"ba22d8ef50cf8c590667ea174691b08273b324ab3e5509223e0daac888709bed"} Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.943529 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-rbbsb" event={"ID":"ffde146e-eb04-4056-acb2-febc2da78e46","Type":"ContainerStarted","Data":"11a956238879e41d32114ff1b846a4d5c3816eb63ff7b9210765cfe75d1223c2"} Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.945443 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-zq9dg" event={"ID":"7102c26c-508b-485c-8a0e-8d35333641ce","Type":"ContainerStarted","Data":"f87c1bba5ac081e3eea9c4e5e9332ee28719ae3aa3a60b9c255cca1fb1802a12"} Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.949576 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:03 crc kubenswrapper[4840]: E1205 15:01:03.949718 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:04.449697125 +0000 UTC m=+142.790759759 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.950094 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:03 crc kubenswrapper[4840]: E1205 15:01:03.950492 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:04.450481108 +0000 UTC m=+142.791543732 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.983575 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-zb8r7" podStartSLOduration=122.983551003 podStartE2EDuration="2m2.983551003s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:03.982115462 +0000 UTC m=+142.323178076" watchObservedRunningTime="2025-12-05 15:01:03.983551003 +0000 UTC m=+142.324613667" Dec 05 15:01:03 crc kubenswrapper[4840]: I1205 15:01:03.997314 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7vcg4" podStartSLOduration=122.997291632 podStartE2EDuration="2m2.997291632s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:03.996557941 +0000 UTC m=+142.337620555" watchObservedRunningTime="2025-12-05 15:01:03.997291632 +0000 UTC m=+142.338354246" Dec 05 15:01:04 crc kubenswrapper[4840]: I1205 15:01:04.050768 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:04 crc kubenswrapper[4840]: E1205 15:01:04.050936 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:04.550910638 +0000 UTC m=+142.891973242 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:04 crc kubenswrapper[4840]: I1205 15:01:04.052668 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:04 crc kubenswrapper[4840]: E1205 15:01:04.053539 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:04.553168052 +0000 UTC m=+142.894230676 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:04 crc kubenswrapper[4840]: I1205 15:01:04.154648 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:04 crc kubenswrapper[4840]: E1205 15:01:04.154704 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:04.654681873 +0000 UTC m=+142.995744487 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:04 crc kubenswrapper[4840]: I1205 15:01:04.155391 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:04 crc kubenswrapper[4840]: E1205 15:01:04.155797 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:04.655785934 +0000 UTC m=+142.996848548 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:04 crc kubenswrapper[4840]: I1205 15:01:04.256382 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:04 crc kubenswrapper[4840]: E1205 15:01:04.256700 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:04.756684938 +0000 UTC m=+143.097747552 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:04 crc kubenswrapper[4840]: I1205 15:01:04.357476 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:04 crc kubenswrapper[4840]: E1205 15:01:04.357789 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:04.857775816 +0000 UTC m=+143.198838420 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:04 crc kubenswrapper[4840]: I1205 15:01:04.459090 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:04 crc kubenswrapper[4840]: E1205 15:01:04.459535 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:04.959506784 +0000 UTC m=+143.300569408 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:04 crc kubenswrapper[4840]: I1205 15:01:04.561058 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:04 crc kubenswrapper[4840]: E1205 15:01:04.561537 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:05.061516529 +0000 UTC m=+143.402579163 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:04 crc kubenswrapper[4840]: I1205 15:01:04.662548 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:04 crc kubenswrapper[4840]: E1205 15:01:04.662710 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:05.16267443 +0000 UTC m=+143.503737054 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:04 crc kubenswrapper[4840]: I1205 15:01:04.663102 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:04 crc kubenswrapper[4840]: E1205 15:01:04.663419 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:05.16340535 +0000 UTC m=+143.504467964 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:04 crc kubenswrapper[4840]: I1205 15:01:04.764405 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:04 crc kubenswrapper[4840]: E1205 15:01:04.764608 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:05.264578202 +0000 UTC m=+143.605640816 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:04 crc kubenswrapper[4840]: I1205 15:01:04.764755 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:04 crc kubenswrapper[4840]: E1205 15:01:04.765145 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:05.265138378 +0000 UTC m=+143.606200982 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:04 crc kubenswrapper[4840]: I1205 15:01:04.866603 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:04 crc kubenswrapper[4840]: E1205 15:01:04.867382 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:05.367365319 +0000 UTC m=+143.708427933 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:04.985768 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:05 crc kubenswrapper[4840]: E1205 15:01:04.986403 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:05.486389135 +0000 UTC m=+143.827451749 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.163183 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:05 crc kubenswrapper[4840]: E1205 15:01:05.167631 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:05.66760073 +0000 UTC m=+144.008663344 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.183407 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-r9h7q" event={"ID":"8a02be23-a9cd-4ea6-9b6f-9e383856cb1d","Type":"ContainerStarted","Data":"8c77bab4f639a98cde782d9e18d00d52ea806ed89ff3eed28046a16587895448"} Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.185566 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-pw7gj" event={"ID":"8cbd5e8f-ec51-4c95-a2e0-43b06373d323","Type":"ContainerStarted","Data":"dcb006a960887a3b47e67605747f30606305f4721b41b786282e11080f05063a"} Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.189422 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-hvqs4" event={"ID":"96c3620f-2835-41cc-8152-40ff0eb6db8c","Type":"ContainerStarted","Data":"c63e3971fe72391e565865ca87ddb1e5fc2887aa6d641c982ae0621c017882ac"} Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.195223 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415780-k44mq" event={"ID":"70be4dce-113d-4659-b5dc-55c3c724de12","Type":"ContainerStarted","Data":"8e45db5dc85e7c50bcd98a93e6e42a6c3aa7b086f5e71f52eee6410338bae161"} Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.197588 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7bdp5" event={"ID":"f1e52a75-bace-452e-989d-3907666cdd11","Type":"ContainerStarted","Data":"c0f38ac9255452e40e70d105fed1ec6cb747adb348eea0f2f72d8e365bba495e"} Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.198584 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2" event={"ID":"4692b73d-a699-4ef5-82e8-cac30360e1b3","Type":"ContainerStarted","Data":"68638b79b0faef4591285d938fd47aa1cf250cc48631abde28cb7caf8c63497a"} Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.200759 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-qrgwq" event={"ID":"cf41adb4-ca77-4997-a2db-0e45bbe317c3","Type":"ContainerStarted","Data":"4ba2fcfcadaf1f0e10662cc1655c5e554d0d346b68c970459396881767ead2db"} Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.201741 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-48665" event={"ID":"0ccd57b5-93f1-4bf8-adf6-4c0b2c400e03","Type":"ContainerStarted","Data":"bd487edd7796812a758b0d891c17672533aea36fcf2d154bc09c59f68bdaff4b"} Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.203467 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-7z87w" event={"ID":"478697e8-c090-46b8-adc9-88d2592e75a8","Type":"ContainerStarted","Data":"51dbdbb21e6c44daf711291e56c8d3321cd422287ab4efc6a9f54457251925b8"} Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.204549 4840 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j5lpd" event={"ID":"0382f04b-cd24-421f-8bad-f147dc41bf91","Type":"ContainerStarted","Data":"6ab20945ff7c47715d9b1c4402cc27f8776531e79f85024d16cf8f5c9ec0686b"} Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.207992 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x52gm" event={"ID":"e281c913-e265-4ce8-af6a-11f255f6faf1","Type":"ContainerStarted","Data":"785fbe3a1cc2a0c8613b1b90c5ed417a47a5688403172f1133c8ae23a287e346"} Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.216611 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xrctg" event={"ID":"2b4c7f51-5133-456b-9923-61854cdcf098","Type":"ContainerStarted","Data":"7f829540f789fe3ab2794671bae17153602cbd94f999dd82646cd8e3930e921f"} Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.219770 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-hvqs4" podStartSLOduration=124.219749855 podStartE2EDuration="2m4.219749855s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:05.217126951 +0000 UTC m=+143.558189565" watchObservedRunningTime="2025-12-05 15:01:05.219749855 +0000 UTC m=+143.560812469" Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.224195 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-589pg" event={"ID":"d8fa1328-3653-44b4-803e-e9c41249bc4f","Type":"ContainerStarted","Data":"c2179ddb44795b83f5ad8a109b6dd50bf760e7be132e7474c66721a07e830e5a"} Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.245058 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-qskjn" event={"ID":"362d5a3b-2574-499c-8142-0f3e4369f573","Type":"ContainerStarted","Data":"e7d6c8925f1a4d04e5ae2b40a5e0486d506b8230e986f031aec33e1fe3b4e679"} Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.262258 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nzmkn" event={"ID":"3a5f30d9-fc2d-47dc-8662-d649023f9521","Type":"ContainerStarted","Data":"ae3f9e13e036441ba73bef9b8536152c980a861d1f517a238fe57e5932983b19"} Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.265588 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:05 crc kubenswrapper[4840]: E1205 15:01:05.266060 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:05.766044684 +0000 UTC m=+144.107107298 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.270406 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-g59rm" event={"ID":"d87a8800-e3bd-4e0b-89c4-fe73193110fb","Type":"ContainerStarted","Data":"036645b5b9a13be2b74db771eec6bf67a2b10b2f0c153192c6642ab9e1abb8dc"}
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.272988 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2fvvn" event={"ID":"0421da7c-2216-4c4a-8422-2e3391a256b2","Type":"ContainerStarted","Data":"49d8db935879f47fdcbb6aa9a79d33fa4a1fa22fb69c20331fdcafd91d2efd9f"}
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.283239 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zsj6v" event={"ID":"da0816a7-7a6a-40ac-a63a-3c26278426d8","Type":"ContainerStarted","Data":"e8706467c222a22783dfa3ffdf1945bde61737a41ac7ec45d88ff5653d5cdd6e"}
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.285055 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8vzfs" event={"ID":"c1cd36b3-0ae5-4d77-a972-4521a0bed069","Type":"ContainerStarted","Data":"9dedf53f288f130aefdf12cb4c645124a99c1447be4db6c9bbe0f6da8f4ab61c"}
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.286771 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-9lj8m" event={"ID":"891a06ac-8101-4fab-a947-2adf9d8eeb7f","Type":"ContainerStarted","Data":"747d652def5ebf1361c2b8fdd018b6f1cbc64448f774beee5db7eb0e713e2ced"}
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.291265 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4bdzq" event={"ID":"87967d07-5c59-4d94-a9c4-1a9f1058e0a2","Type":"ContainerStarted","Data":"a26fab8f66eb45caae98e7a613aa0e63c3531db6e65e304aeae15dfc847464b8"}
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.292942 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tn7k5" event={"ID":"284f860e-20fc-48b2-91f2-a3e6fcb6b2c7","Type":"ContainerStarted","Data":"f618a2e789f9cab2b773cd1e528621b121bd98d06f514f897815a764e97eba9f"}
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.294990 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fjcvg" event={"ID":"3d9897dc-10eb-4947-b5ce-63362338dfd1","Type":"ContainerStarted","Data":"9f19d404e5686a45396f59013a4a8941fa9adeab0b7af395fd4e5df921041741"}
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.296298 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-mp7c5" event={"ID":"a905c75c-cff5-4137-b8ee-212bfc0334ff","Type":"ContainerStarted","Data":"fbc7e463b470136bad5d41356e9a56387e33d997cbab98a8c1c42ffa58cf627a"}
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.299488 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-5t5n2" event={"ID":"c25af517-aa72-4f6b-9271-e936daa641d7","Type":"ContainerStarted","Data":"957fac2db4a8797351011a8d01cee74cd24c3315b16ad516905058d0d460bdd1"}
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.304331 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nc87t" event={"ID":"f9f44648-d5f1-49ee-a394-115e43c97fc9","Type":"ContainerStarted","Data":"c29e94fec24fa6a3b89007ce6172a247b2109b25d1ec64a168b4f2e563688ef8"}
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.306596 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69w55" event={"ID":"1d0ca542-fc9c-4849-ba80-c7fd15606fb5","Type":"ContainerStarted","Data":"7509d1c44560cc671b582f9f9fbde309712c138836ac3b456d3ff0baf8b8b1d7"}
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.306988 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-rbbsb"
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.307134 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-svf7z"
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.310332 4840 patch_prober.go:28] interesting pod/downloads-7954f5f757-rbbsb container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body=
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.310389 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rbbsb" podUID="ffde146e-eb04-4056-acb2-febc2da78e46" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused"
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.312908 4840 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-svf7z container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.23:6443/healthz\": dial tcp 10.217.0.23:6443: connect: connection refused" start-of-body=
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.312945 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" podUID="811f6598-f603-4a15-8dec-add067d82d5c" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.23:6443/healthz\": dial tcp 10.217.0.23:6443: connect: connection refused"
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.326728 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-htv8l" podStartSLOduration=124.32671293 podStartE2EDuration="2m4.32671293s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:05.325482745 +0000 UTC m=+143.666545359" watchObservedRunningTime="2025-12-05 15:01:05.32671293 +0000 UTC m=+143.667775544"
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.366539 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 15:01:05 crc kubenswrapper[4840]: E1205 15:01:05.367826 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:05.867806912 +0000 UTC m=+144.208869536 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.390558 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-zq9dg" podStartSLOduration=8.390536715 podStartE2EDuration="8.390536715s" podCreationTimestamp="2025-12-05 15:00:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:05.362274646 +0000 UTC m=+143.703337260" watchObservedRunningTime="2025-12-05 15:01:05.390536715 +0000 UTC m=+143.731599329"
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.390690 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-rbbsb" podStartSLOduration=124.390686219 podStartE2EDuration="2m4.390686219s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:05.390532795 +0000 UTC m=+143.731595409" watchObservedRunningTime="2025-12-05 15:01:05.390686219 +0000 UTC m=+143.731748833"
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.415181 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv" podStartSLOduration=124.415163051 podStartE2EDuration="2m4.415163051s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:05.414113332 +0000 UTC m=+143.755175956" watchObservedRunningTime="2025-12-05 15:01:05.415163051 +0000 UTC m=+143.756225665"
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.480300 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:05 crc kubenswrapper[4840]: E1205 15:01:05.482755 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:05.982738072 +0000 UTC m=+144.323800786 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.593728 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 15:01:05 crc kubenswrapper[4840]: E1205 15:01:05.594176 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:06.094157173 +0000 UTC m=+144.435219787 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.694961 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:05 crc kubenswrapper[4840]: E1205 15:01:05.695439 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:06.195302674 +0000 UTC m=+144.536365298 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.796317 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 15:01:05 crc kubenswrapper[4840]: E1205 15:01:05.796631 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:06.296617699 +0000 UTC m=+144.637680313 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.897873 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:05 crc kubenswrapper[4840]: E1205 15:01:05.898192 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:06.398172011 +0000 UTC m=+144.739234625 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.999566 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 15:01:05 crc kubenswrapper[4840]: E1205 15:01:05.999771 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:06.499742594 +0000 UTC m=+144.840805208 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:05 crc kubenswrapper[4840]: I1205 15:01:05.999923 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:06 crc kubenswrapper[4840]: E1205 15:01:06.000266 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:06.500257528 +0000 UTC m=+144.841320222 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.101008 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 15:01:06 crc kubenswrapper[4840]: E1205 15:01:06.101714 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:06.601695747 +0000 UTC m=+144.942758361 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.231017 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:06 crc kubenswrapper[4840]: E1205 15:01:06.231448 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:06.731428547 +0000 UTC m=+145.072491161 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.315891 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jh844" event={"ID":"fdf64638-6ce1-424a-af26-a49e98a29582","Type":"ContainerStarted","Data":"34a2bc3b7a8a4a210ad236df9bc6c1e2665eaddc5fbc3270850767db6f8d6332"}
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.317604 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pczb2" event={"ID":"237e4a75-4edd-4622-87f4-03a1f620649d","Type":"ContainerStarted","Data":"9ccacc8a80ab6e263a4505eef5cb595bfe1c90057dcc8f6f117215498e835c93"}
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.317913 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pczb2"
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.319173 4840 generic.go:334] "Generic (PLEG): container finished" podID="4692b73d-a699-4ef5-82e8-cac30360e1b3" containerID="68638b79b0faef4591285d938fd47aa1cf250cc48631abde28cb7caf8c63497a" exitCode=0
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.319219 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2" event={"ID":"4692b73d-a699-4ef5-82e8-cac30360e1b3","Type":"ContainerDied","Data":"68638b79b0faef4591285d938fd47aa1cf250cc48631abde28cb7caf8c63497a"}
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.321086 4840 generic.go:334] "Generic (PLEG): container finished" podID="d87a8800-e3bd-4e0b-89c4-fe73193110fb" containerID="036645b5b9a13be2b74db771eec6bf67a2b10b2f0c153192c6642ab9e1abb8dc" exitCode=0
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.321223 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-g59rm" event={"ID":"d87a8800-e3bd-4e0b-89c4-fe73193110fb","Type":"ContainerDied","Data":"036645b5b9a13be2b74db771eec6bf67a2b10b2f0c153192c6642ab9e1abb8dc"}
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.322966 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tqkgt" event={"ID":"80ec2d65-850c-4134-b5cd-f763de4964fb","Type":"ContainerStarted","Data":"553146ab705a7dab87e5a67b44511091445e86e3706470c612970d0e3e02b9b3"}
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.323490 4840 patch_prober.go:28] interesting pod/downloads-7954f5f757-rbbsb container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body=
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.323536 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rbbsb" podUID="ffde146e-eb04-4056-acb2-febc2da78e46" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused"
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.333379 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 15:01:06 crc kubenswrapper[4840]: E1205 15:01:06.333598 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:06.833567235 +0000 UTC m=+145.174629859 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.334060 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:06 crc kubenswrapper[4840]: E1205 15:01:06.334466 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:06.83445445 +0000 UTC m=+145.175517064 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.337521 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" podStartSLOduration=125.337506346 podStartE2EDuration="2m5.337506346s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:05.47275869 +0000 UTC m=+143.813821304" watchObservedRunningTime="2025-12-05 15:01:06.337506346 +0000 UTC m=+144.678568960"
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.339047 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pczb2" podStartSLOduration=125.33903736 podStartE2EDuration="2m5.33903736s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:06.336843478 +0000 UTC m=+144.677906112" watchObservedRunningTime="2025-12-05 15:01:06.33903736 +0000 UTC m=+144.680099974"
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.395741 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-qskjn" podStartSLOduration=125.395726633 podStartE2EDuration="2m5.395726633s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:06.394258531 +0000 UTC m=+144.735321145" watchObservedRunningTime="2025-12-05 15:01:06.395726633 +0000 UTC m=+144.736789247"
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.419315 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-j5lpd" podStartSLOduration=125.41929725 podStartE2EDuration="2m5.41929725s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:06.41753529 +0000 UTC m=+144.758597904" watchObservedRunningTime="2025-12-05 15:01:06.41929725 +0000 UTC m=+144.760359864"
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.436209 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 15:01:06 crc kubenswrapper[4840]: E1205 15:01:06.436761 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:06.936736973 +0000 UTC m=+145.277799587 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.437029 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:06 crc kubenswrapper[4840]: E1205 15:01:06.440926 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:06.940913321 +0000 UTC m=+145.281975935 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.540265 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 15:01:06 crc kubenswrapper[4840]: E1205 15:01:06.540790 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:07.040775155 +0000 UTC m=+145.381837759 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.616779 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4bdzq" podStartSLOduration=125.616753454 podStartE2EDuration="2m5.616753454s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:06.539811978 +0000 UTC m=+144.880874602" watchObservedRunningTime="2025-12-05 15:01:06.616753454 +0000 UTC m=+144.957816078"
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.618364 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2fvvn" podStartSLOduration=125.618346789 podStartE2EDuration="2m5.618346789s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:06.615271742 +0000 UTC m=+144.956334356" watchObservedRunningTime="2025-12-05 15:01:06.618346789 +0000 UTC m=+144.959409413"
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.643499 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:06 crc kubenswrapper[4840]: E1205 15:01:06.643906 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:07.143890252 +0000 UTC m=+145.484952866 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.745047 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 15:01:06 crc kubenswrapper[4840]: E1205 15:01:06.746925 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:07.246902465 +0000 UTC m=+145.587965079 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.847583 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:06 crc kubenswrapper[4840]: E1205 15:01:06.847990 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:07.347975243 +0000 UTC m=+145.689037857 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.850297 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tn7k5" podStartSLOduration=125.850283169 podStartE2EDuration="2m5.850283169s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:06.655131379 +0000 UTC m=+144.996193993" watchObservedRunningTime="2025-12-05 15:01:06.850283169 +0000 UTC m=+145.191345783"
Dec 05 15:01:06 crc kubenswrapper[4840]: I1205 15:01:06.954312 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 15:01:06 crc kubenswrapper[4840]: E1205 15:01:06.954956 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:07.454924788 +0000 UTC m=+145.795987402 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.001259 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-pw7gj" podStartSLOduration=126.001239858 podStartE2EDuration="2m6.001239858s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:06.852883322 +0000 UTC m=+145.193945936" watchObservedRunningTime="2025-12-05 15:01:07.001239858 +0000 UTC m=+145.342302472"
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.035832 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-nc87t" podStartSLOduration=126.035804656 podStartE2EDuration="2m6.035804656s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:07.034676924 +0000 UTC m=+145.375739558" watchObservedRunningTime="2025-12-05 15:01:07.035804656 +0000 UTC m=+145.376867270"
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.036949 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nzmkn" podStartSLOduration=126.036940008 podStartE2EDuration="2m6.036940008s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:07.003535423 +0000 UTC m=+145.344598067" watchObservedRunningTime="2025-12-05 15:01:07.036940008 +0000 UTC m=+145.378002622"
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.055854 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:07 crc kubenswrapper[4840]: E1205 15:01:07.056250 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:07.556235163 +0000 UTC m=+145.897297777 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.071393 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-r9h7q" podStartSLOduration=126.071372452 podStartE2EDuration="2m6.071372452s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:07.067299816 +0000 UTC m=+145.408362440" watchObservedRunningTime="2025-12-05 15:01:07.071372452 +0000 UTC m=+145.412435066"
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.157998 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 15:01:07 crc kubenswrapper[4840]: E1205 15:01:07.158155 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:07.658122055 +0000 UTC m=+145.999184669 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.158799 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:07 crc kubenswrapper[4840]: E1205 15:01:07.159246 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:07.659229696 +0000 UTC m=+146.000292310 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.166691 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-svf7z"
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.170367 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-mp7c5" podStartSLOduration=126.170346191 podStartE2EDuration="2m6.170346191s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:07.166207204 +0000 UTC m=+145.507269808" watchObservedRunningTime="2025-12-05 15:01:07.170346191 +0000 UTC m=+145.511408805"
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.170488 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-48665" podStartSLOduration=10.170482455 podStartE2EDuration="10.170482455s" podCreationTimestamp="2025-12-05 15:00:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:07.107668698 +0000 UTC m=+145.448731312" watchObservedRunningTime="2025-12-05 15:01:07.170482455 +0000 UTC m=+145.511545069"
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.184185 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-qrgwq"
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.186708 4840 patch_prober.go:28] interesting pod/router-default-5444994796-qrgwq container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body=
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.186766 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrgwq" podUID="cf41adb4-ca77-4997-a2db-0e45bbe317c3" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused"
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.201556 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-qrgwq" podStartSLOduration=126.201535003 podStartE2EDuration="2m6.201535003s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:07.200101162 +0000 UTC m=+145.541163776" watchObservedRunningTime="2025-12-05 15:01:07.201535003 +0000 UTC m=+145.542597617"
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.259937 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-589pg" podStartSLOduration=126.259918674 podStartE2EDuration="2m6.259918674s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:07.228991359 +0000 UTC m=+145.570053973" watchObservedRunningTime="2025-12-05 15:01:07.259918674 +0000 UTC m=+145.600981288"
Dec 05 15:01:07 crc kubenswrapper[4840]: E1205 15:01:07.260277 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:07.760202042 +0000 UTC m=+146.101264656 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.260116 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.261303 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv"
Dec 05 15:01:07 crc kubenswrapper[4840]: E1205 15:01:07.261777 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:07.761764276 +0000 UTC m=+146.102826890 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.298163 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7bdp5" podStartSLOduration=126.298147645 podStartE2EDuration="2m6.298147645s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:07.269171006 +0000 UTC m=+145.610233610" watchObservedRunningTime="2025-12-05 15:01:07.298147645 +0000 UTC m=+145.639210259"
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.366512 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 15:01:07 crc kubenswrapper[4840]: E1205 15:01:07.366949 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:07.86691981 +0000 UTC m=+146.207982424 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.372411 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-7t8bh" event={"ID":"a612c2a1-ac77-40f9-8cd9-18cf9f68b963","Type":"ContainerStarted","Data":"680ef84b73157e768ef8d6c655f055bf655fd9a5f4d60b8a5e4c10dcdb73f3d0"}
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.375243 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-zsj6v" podStartSLOduration=126.375224205 podStartE2EDuration="2m6.375224205s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:07.344422874 +0000 UTC m=+145.685485488" watchObservedRunningTime="2025-12-05 15:01:07.375224205 +0000 UTC m=+145.716286819"
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.377567 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415780-k44mq" podStartSLOduration=67.377560101 podStartE2EDuration="1m7.377560101s" podCreationTimestamp="2025-12-05 15:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:07.3736164 +0000 UTC m=+145.714679024" watchObservedRunningTime="2025-12-05 15:01:07.377560101 +0000 UTC m=+145.718622715"
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.400905 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-x52gm" podStartSLOduration=126.40086064 podStartE2EDuration="2m6.40086064s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:07.398573725 +0000 UTC m=+145.739636349" watchObservedRunningTime="2025-12-05 15:01:07.40086064 +0000 UTC m=+145.741923264"
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.415336 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-7z87w" event={"ID":"478697e8-c090-46b8-adc9-88d2592e75a8","Type":"ContainerStarted","Data":"29d476a43043d3f7db68f77d8fda2c0934f9f7b1a5cf646c7adfe50809a21ffd"}
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.428369 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-5t5n2" event={"ID":"c25af517-aa72-4f6b-9271-e936daa641d7","Type":"ContainerStarted","Data":"70aa068a35a8d069d64ff7fc42b5d2d993b14f6bf7e1ba42e441257666688857"}
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.429051 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-5t5n2"
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fjcvg" event={"ID":"3d9897dc-10eb-4947-b5ce-63362338dfd1","Type":"ContainerStarted","Data":"10bf7ff0eb4186fb4782224595b79e52f0608ae6b75d949f84199e0a06fcc3f7"} Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.454147 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-7z87w" podStartSLOduration=126.454132596 podStartE2EDuration="2m6.454132596s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:07.453047185 +0000 UTC m=+145.794109799" watchObservedRunningTime="2025-12-05 15:01:07.454132596 +0000 UTC m=+145.795195210" Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.490824 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:07 crc kubenswrapper[4840]: E1205 15:01:07.492467 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:07.992452049 +0000 UTC m=+146.333514663 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.503791 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69w55" event={"ID":"1d0ca542-fc9c-4849-ba80-c7fd15606fb5","Type":"ContainerStarted","Data":"3f4ff20403dd7003df9e72763eae6f2ec82aed8e9ecda52d749f91b3f30296fc"} Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.510442 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-7t8bh" podStartSLOduration=126.510423948 podStartE2EDuration="2m6.510423948s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:07.506669252 +0000 UTC m=+145.847731866" watchObservedRunningTime="2025-12-05 15:01:07.510423948 +0000 UTC m=+145.851486562" Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.514802 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8vzfs" event={"ID":"c1cd36b3-0ae5-4d77-a972-4521a0bed069","Type":"ContainerStarted","Data":"cfac41746b69ab033ab0693f856d329086dcc2ec021afb66b8348c266bf38b93"} Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.515534 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8vzfs" Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.520918 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-9lj8m" event={"ID":"891a06ac-8101-4fab-a947-2adf9d8eeb7f","Type":"ContainerStarted","Data":"aab8d686dea6222dbe732d9a089e61d067a9cd7a10acfd5edc3dcb67c185e306"} Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.531790 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2" event={"ID":"4692b73d-a699-4ef5-82e8-cac30360e1b3","Type":"ContainerStarted","Data":"1773e4bb0d1505d8aa8bc20b3d552e2ad3da1b6592885b0830d5a4b8cebfd20f"} Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.554393 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-g59rm" event={"ID":"d87a8800-e3bd-4e0b-89c4-fe73193110fb","Type":"ContainerStarted","Data":"b1dbd8ac53eec9a5be8de3194a841ac81d9195a6b12f5e8512268824c7a12f4e"} Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.562145 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-fnhqw" event={"ID":"e68d4dc9-2488-4666-897a-f2ce27934d81","Type":"ContainerStarted","Data":"099802d56630d09e0edf7966c0a6b7fc6df7c080e1287716efbefc9513c5f9b7"} Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.564918 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xrctg" event={"ID":"2b4c7f51-5133-456b-9923-61854cdcf098","Type":"ContainerStarted","Data":"ee78980e8d22524c9402b3414d0f8c8932f04221cc983e28fbe59da89524791e"} Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.594386 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:07 crc kubenswrapper[4840]: E1205 15:01:07.595441 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:08.095421261 +0000 UTC m=+146.436483875 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.694254 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fjcvg" podStartSLOduration=126.694232716 podStartE2EDuration="2m6.694232716s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:07.634180497 +0000 UTC m=+145.975243111" watchObservedRunningTime="2025-12-05 15:01:07.694232716 +0000 UTC m=+146.035295360" Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.701833 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:07 crc kubenswrapper[4840]: E1205 15:01:07.703459 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:08.203446616 +0000 UTC m=+146.544509230 (durationBeforeRetry 500ms). 
Dec 05 15:01:07 crc kubenswrapper[4840]: E1205 15:01:07.703459 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:08.203446616 +0000 UTC m=+146.544509230 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.772286 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jh844" podStartSLOduration=126.772265323 podStartE2EDuration="2m6.772265323s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:07.701978505 +0000 UTC m=+146.043041129" watchObservedRunningTime="2025-12-05 15:01:07.772265323 +0000 UTC m=+146.113327937"
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.773157 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8vzfs" podStartSLOduration=126.773150038 podStartE2EDuration="2m6.773150038s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:07.746860464 +0000 UTC m=+146.087923078" watchObservedRunningTime="2025-12-05 15:01:07.773150038 +0000 UTC m=+146.114212652"
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.808037 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 15:01:07 crc kubenswrapper[4840]: E1205 15:01:07.808483 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:08.308466617 +0000 UTC m=+146.649529231 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.809105 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2z4bf"]
Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.810108 4840 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/community-operators-2z4bf" Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.811809 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-5t5n2" podStartSLOduration=10.81178605 podStartE2EDuration="10.81178605s" podCreationTimestamp="2025-12-05 15:00:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:07.807498879 +0000 UTC m=+146.148561493" watchObservedRunningTime="2025-12-05 15:01:07.81178605 +0000 UTC m=+146.152848664" Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.814146 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.837288 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2z4bf"] Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.839647 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-69w55" podStartSLOduration=126.839629578 podStartE2EDuration="2m6.839629578s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:07.836048077 +0000 UTC m=+146.177110681" watchObservedRunningTime="2025-12-05 15:01:07.839629578 +0000 UTC m=+146.180692192" Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.913201 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6138547-0c56-4951-bab0-283fe8649655-utilities\") pod \"community-operators-2z4bf\" (UID: \"a6138547-0c56-4951-bab0-283fe8649655\") " pod="openshift-marketplace/community-operators-2z4bf" Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.913253 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.913315 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsp47\" (UniqueName: \"kubernetes.io/projected/a6138547-0c56-4951-bab0-283fe8649655-kube-api-access-lsp47\") pod \"community-operators-2z4bf\" (UID: \"a6138547-0c56-4951-bab0-283fe8649655\") " pod="openshift-marketplace/community-operators-2z4bf" Dec 05 15:01:07 crc kubenswrapper[4840]: I1205 15:01:07.913334 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6138547-0c56-4951-bab0-283fe8649655-catalog-content\") pod \"community-operators-2z4bf\" (UID: \"a6138547-0c56-4951-bab0-283fe8649655\") " pod="openshift-marketplace/community-operators-2z4bf" Dec 05 15:01:07 crc kubenswrapper[4840]: E1205 15:01:07.913606 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" 
failed. No retries permitted until 2025-12-05 15:01:08.41359459 +0000 UTC m=+146.754657204 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.000750 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2" podStartSLOduration=127.000730984 podStartE2EDuration="2m7.000730984s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:07.890406704 +0000 UTC m=+146.231469338" watchObservedRunningTime="2025-12-05 15:01:08.000730984 +0000 UTC m=+146.341793598" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.001771 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-q9jjl"] Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.002947 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q9jjl" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.013816 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-tqkgt" podStartSLOduration=127.013794593 podStartE2EDuration="2m7.013794593s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:08.012916799 +0000 UTC m=+146.353979433" watchObservedRunningTime="2025-12-05 15:01:08.013794593 +0000 UTC m=+146.354857207" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.014634 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.014896 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6138547-0c56-4951-bab0-283fe8649655-catalog-content\") pod \"community-operators-2z4bf\" (UID: \"a6138547-0c56-4951-bab0-283fe8649655\") " pod="openshift-marketplace/community-operators-2z4bf" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.014974 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6138547-0c56-4951-bab0-283fe8649655-utilities\") pod \"community-operators-2z4bf\" (UID: \"a6138547-0c56-4951-bab0-283fe8649655\") " pod="openshift-marketplace/community-operators-2z4bf" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.015077 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsp47\" (UniqueName: 
\"kubernetes.io/projected/a6138547-0c56-4951-bab0-283fe8649655-kube-api-access-lsp47\") pod \"community-operators-2z4bf\" (UID: \"a6138547-0c56-4951-bab0-283fe8649655\") " pod="openshift-marketplace/community-operators-2z4bf" Dec 05 15:01:08 crc kubenswrapper[4840]: E1205 15:01:08.015400 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:08.515384358 +0000 UTC m=+146.856446972 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.015738 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6138547-0c56-4951-bab0-283fe8649655-catalog-content\") pod \"community-operators-2z4bf\" (UID: \"a6138547-0c56-4951-bab0-283fe8649655\") " pod="openshift-marketplace/community-operators-2z4bf" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.015970 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6138547-0c56-4951-bab0-283fe8649655-utilities\") pod \"community-operators-2z4bf\" (UID: \"a6138547-0c56-4951-bab0-283fe8649655\") " pod="openshift-marketplace/community-operators-2z4bf" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.051384 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.091402 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsp47\" (UniqueName: \"kubernetes.io/projected/a6138547-0c56-4951-bab0-283fe8649655-kube-api-access-lsp47\") pod \"community-operators-2z4bf\" (UID: \"a6138547-0c56-4951-bab0-283fe8649655\") " pod="openshift-marketplace/community-operators-2z4bf" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.118687 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q9jjl"] Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.122206 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/516530e0-a660-4755-8d26-b7c798a43428-utilities\") pod \"certified-operators-q9jjl\" (UID: \"516530e0-a660-4755-8d26-b7c798a43428\") " pod="openshift-marketplace/certified-operators-q9jjl" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.122252 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tw7nd\" (UniqueName: \"kubernetes.io/projected/516530e0-a660-4755-8d26-b7c798a43428-kube-api-access-tw7nd\") pod \"certified-operators-q9jjl\" (UID: \"516530e0-a660-4755-8d26-b7c798a43428\") " pod="openshift-marketplace/certified-operators-q9jjl" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.122337 4840 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/516530e0-a660-4755-8d26-b7c798a43428-catalog-content\") pod \"certified-operators-q9jjl\" (UID: \"516530e0-a660-4755-8d26-b7c798a43428\") " pod="openshift-marketplace/certified-operators-q9jjl" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.122404 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:08 crc kubenswrapper[4840]: E1205 15:01:08.122703 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:08.622689063 +0000 UTC m=+146.963751737 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.182084 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2z4bf" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.188934 4840 patch_prober.go:28] interesting pod/router-default-5444994796-qrgwq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 15:01:08 crc kubenswrapper[4840]: [-]has-synced failed: reason withheld Dec 05 15:01:08 crc kubenswrapper[4840]: [+]process-running ok Dec 05 15:01:08 crc kubenswrapper[4840]: healthz check failed Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.189024 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrgwq" podUID="cf41adb4-ca77-4997-a2db-0e45bbe317c3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.203418 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xc72x"] Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.204742 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xc72x" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.224107 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:08 crc kubenswrapper[4840]: E1205 15:01:08.224250 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:08.724224925 +0000 UTC m=+147.065287539 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.224380 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.224429 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/516530e0-a660-4755-8d26-b7c798a43428-utilities\") pod \"certified-operators-q9jjl\" (UID: \"516530e0-a660-4755-8d26-b7c798a43428\") " pod="openshift-marketplace/certified-operators-q9jjl" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.224444 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tw7nd\" (UniqueName: \"kubernetes.io/projected/516530e0-a660-4755-8d26-b7c798a43428-kube-api-access-tw7nd\") pod \"certified-operators-q9jjl\" (UID: \"516530e0-a660-4755-8d26-b7c798a43428\") " pod="openshift-marketplace/certified-operators-q9jjl" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.224503 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/516530e0-a660-4755-8d26-b7c798a43428-catalog-content\") pod \"certified-operators-q9jjl\" (UID: \"516530e0-a660-4755-8d26-b7c798a43428\") " pod="openshift-marketplace/certified-operators-q9jjl" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.224893 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/516530e0-a660-4755-8d26-b7c798a43428-catalog-content\") pod \"certified-operators-q9jjl\" (UID: \"516530e0-a660-4755-8d26-b7c798a43428\") " pod="openshift-marketplace/certified-operators-q9jjl" Dec 05 15:01:08 crc kubenswrapper[4840]: E1205 15:01:08.224936 4840 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:08.724919315 +0000 UTC m=+147.065981929 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.224981 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/516530e0-a660-4755-8d26-b7c798a43428-utilities\") pod \"certified-operators-q9jjl\" (UID: \"516530e0-a660-4755-8d26-b7c798a43428\") " pod="openshift-marketplace/certified-operators-q9jjl" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.264796 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-xrctg" podStartSLOduration=127.264777642 podStartE2EDuration="2m7.264777642s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:08.263949698 +0000 UTC m=+146.605012312" watchObservedRunningTime="2025-12-05 15:01:08.264777642 +0000 UTC m=+146.605840256" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.267382 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xc72x"] Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.297525 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tw7nd\" (UniqueName: \"kubernetes.io/projected/516530e0-a660-4755-8d26-b7c798a43428-kube-api-access-tw7nd\") pod \"certified-operators-q9jjl\" (UID: \"516530e0-a660-4755-8d26-b7c798a43428\") " pod="openshift-marketplace/certified-operators-q9jjl" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.333335 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.333552 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7brzn\" (UniqueName: \"kubernetes.io/projected/d59d7b71-22f9-49c1-9415-f420122f72df-kube-api-access-7brzn\") pod \"community-operators-xc72x\" (UID: \"d59d7b71-22f9-49c1-9415-f420122f72df\") " pod="openshift-marketplace/community-operators-xc72x" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.333581 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d59d7b71-22f9-49c1-9415-f420122f72df-utilities\") pod \"community-operators-xc72x\" (UID: \"d59d7b71-22f9-49c1-9415-f420122f72df\") " pod="openshift-marketplace/community-operators-xc72x" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.333646 4840 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d59d7b71-22f9-49c1-9415-f420122f72df-catalog-content\") pod \"community-operators-xc72x\" (UID: \"d59d7b71-22f9-49c1-9415-f420122f72df\") " pod="openshift-marketplace/community-operators-xc72x" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.333840 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q9jjl" Dec 05 15:01:08 crc kubenswrapper[4840]: E1205 15:01:08.334462 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:08.834444622 +0000 UTC m=+147.175507236 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.396270 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-9lj8m" podStartSLOduration=127.39625274 podStartE2EDuration="2m7.39625274s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:08.366965422 +0000 UTC m=+146.708028036" watchObservedRunningTime="2025-12-05 15:01:08.39625274 +0000 UTC m=+146.737315354" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.397330 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vgz4z"] Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.398207 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vgz4z" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.442695 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d59d7b71-22f9-49c1-9415-f420122f72df-utilities\") pod \"community-operators-xc72x\" (UID: \"d59d7b71-22f9-49c1-9415-f420122f72df\") " pod="openshift-marketplace/community-operators-xc72x" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.443196 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d59d7b71-22f9-49c1-9415-f420122f72df-catalog-content\") pod \"community-operators-xc72x\" (UID: \"d59d7b71-22f9-49c1-9415-f420122f72df\") " pod="openshift-marketplace/community-operators-xc72x" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.443237 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.443309 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vgz4z"] Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.443350 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7brzn\" (UniqueName: \"kubernetes.io/projected/d59d7b71-22f9-49c1-9415-f420122f72df-kube-api-access-7brzn\") pod \"community-operators-xc72x\" (UID: \"d59d7b71-22f9-49c1-9415-f420122f72df\") " pod="openshift-marketplace/community-operators-xc72x" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.443648 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d59d7b71-22f9-49c1-9415-f420122f72df-utilities\") pod \"community-operators-xc72x\" (UID: \"d59d7b71-22f9-49c1-9415-f420122f72df\") " pod="openshift-marketplace/community-operators-xc72x" Dec 05 15:01:08 crc kubenswrapper[4840]: E1205 15:01:08.443959 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:08.943943659 +0000 UTC m=+147.285006343 (durationBeforeRetry 500ms). 
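
The "Observed pod startup duration" entries scattered through this window are plain timestamp arithmetic: with the zero sentinels for firstStartedPulling and lastFinishedPulling ("0001-01-01 00:00:00 +0000 UTC"), no image-pull interval is excluded, and podStartSLOduration is watchObservedRunningTime minus podCreationTimestamp. For the machine-api-operator entry above: 15:01:08.39625274 minus 14:59:01 is 127.39625274s, exactly the printed podStartSLOduration. A minimal check with the values copied from that entry (the layout string mirrors the format the log prints):

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // Layout matching the "2025-12-05 14:59:01 +0000 UTC" form in the log.
        const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
        created, err := time.Parse(layout, "2025-12-05 14:59:01 +0000 UTC")
        if err != nil {
            panic(err)
        }
        observed, err := time.Parse(layout, "2025-12-05 15:01:08.39625274 +0000 UTC")
        if err != nil {
            panic(err)
        }
        // Prints 2m7.39625274s, i.e. podStartSLOduration=127.39625274.
        fmt.Println(observed.Sub(created))
    }
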
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.444862 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d59d7b71-22f9-49c1-9415-f420122f72df-catalog-content\") pod \"community-operators-xc72x\" (UID: \"d59d7b71-22f9-49c1-9415-f420122f72df\") " pod="openshift-marketplace/community-operators-xc72x" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.492263 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.545228 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.545390 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b685425-9cba-4168-a2a6-a4a707989b01-catalog-content\") pod \"certified-operators-vgz4z\" (UID: \"2b685425-9cba-4168-a2a6-a4a707989b01\") " pod="openshift-marketplace/certified-operators-vgz4z" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.545438 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b685425-9cba-4168-a2a6-a4a707989b01-utilities\") pod \"certified-operators-vgz4z\" (UID: \"2b685425-9cba-4168-a2a6-a4a707989b01\") " pod="openshift-marketplace/certified-operators-vgz4z" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.545561 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c72mj\" (UniqueName: \"kubernetes.io/projected/2b685425-9cba-4168-a2a6-a4a707989b01-kube-api-access-c72mj\") pod \"certified-operators-vgz4z\" (UID: \"2b685425-9cba-4168-a2a6-a4a707989b01\") " pod="openshift-marketplace/certified-operators-vgz4z" Dec 05 15:01:08 crc kubenswrapper[4840]: E1205 15:01:08.545685 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:09.045668796 +0000 UTC m=+147.386731410 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.549336 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7brzn\" (UniqueName: \"kubernetes.io/projected/d59d7b71-22f9-49c1-9415-f420122f72df-kube-api-access-7brzn\") pod \"community-operators-xc72x\" (UID: \"d59d7b71-22f9-49c1-9415-f420122f72df\") " pod="openshift-marketplace/community-operators-xc72x" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.594832 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-g59rm" event={"ID":"d87a8800-e3bd-4e0b-89c4-fe73193110fb","Type":"ContainerStarted","Data":"4fae89a7c01bad08bf71fdcb3e9a8b74474d55106440f4f3683c3d453d8d0458"} Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.665804 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b685425-9cba-4168-a2a6-a4a707989b01-utilities\") pod \"certified-operators-vgz4z\" (UID: \"2b685425-9cba-4168-a2a6-a4a707989b01\") " pod="openshift-marketplace/certified-operators-vgz4z" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.665948 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.666012 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c72mj\" (UniqueName: \"kubernetes.io/projected/2b685425-9cba-4168-a2a6-a4a707989b01-kube-api-access-c72mj\") pod \"certified-operators-vgz4z\" (UID: \"2b685425-9cba-4168-a2a6-a4a707989b01\") " pod="openshift-marketplace/certified-operators-vgz4z" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.666079 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b685425-9cba-4168-a2a6-a4a707989b01-catalog-content\") pod \"certified-operators-vgz4z\" (UID: \"2b685425-9cba-4168-a2a6-a4a707989b01\") " pod="openshift-marketplace/certified-operators-vgz4z" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.667595 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b685425-9cba-4168-a2a6-a4a707989b01-utilities\") pod \"certified-operators-vgz4z\" (UID: \"2b685425-9cba-4168-a2a6-a4a707989b01\") " pod="openshift-marketplace/certified-operators-vgz4z" Dec 05 15:01:08 crc kubenswrapper[4840]: E1205 15:01:08.668303 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:09.168288584 +0000 UTC m=+147.509351198 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.669382 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b685425-9cba-4168-a2a6-a4a707989b01-catalog-content\") pod \"certified-operators-vgz4z\" (UID: \"2b685425-9cba-4168-a2a6-a4a707989b01\") " pod="openshift-marketplace/certified-operators-vgz4z" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.918016 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xc72x" Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.918799 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:08 crc kubenswrapper[4840]: I1205 15:01:08.919688 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c72mj\" (UniqueName: \"kubernetes.io/projected/2b685425-9cba-4168-a2a6-a4a707989b01-kube-api-access-c72mj\") pod \"certified-operators-vgz4z\" (UID: \"2b685425-9cba-4168-a2a6-a4a707989b01\") " pod="openshift-marketplace/certified-operators-vgz4z" Dec 05 15:01:08 crc kubenswrapper[4840]: E1205 15:01:08.920287 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:09.42027152 +0000 UTC m=+147.761334134 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.021277 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:09 crc kubenswrapper[4840]: E1205 15:01:09.021728 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:09.521710169 +0000 UTC m=+147.862772783 (durationBeforeRetry 500ms). 
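
Each failed attempt ends in a nestedpendingoperations entry that arms a per-volume retry gate: no retries are permitted until the printed deadline, durationBeforeRetry 500ms here, and only then may the reconciler's next pass (the recurring "operationExecutor.MountVolume started" and "operationExecutor.UnmountVolume started" lines) attempt the operation again. The kubelet actually grows this delay exponentially on repeated failures; every entry in this capture still shows the initial 500ms step. A reduced sketch of that bookkeeping, with invented names and the fixed 500ms delay seen here, purely for illustration:

    package main

    import (
        "fmt"
        "time"
    )

    // retryGate mimics the "No retries permitted until <t>" bookkeeping in the
    // nestedpendingoperations entries above. Illustrative only, not kubelet code.
    type retryGate struct {
        notBefore map[string]time.Time
        delay     time.Duration
    }

    func newRetryGate(delay time.Duration) *retryGate {
        return &retryGate{notBefore: map[string]time.Time{}, delay: delay}
    }

    func (g *retryGate) try(volume string, op func() error) error {
        if t, ok := g.notBefore[volume]; ok && time.Now().Before(t) {
            return fmt.Errorf("no retries permitted until %s", t)
        }
        if err := op(); err != nil {
            g.notBefore[volume] = time.Now().Add(g.delay) // arm the gate
            return err
        }
        delete(g.notBefore, volume) // success clears the gate
        return nil
    }

    func main() {
        g := newRetryGate(500 * time.Millisecond)
        mount := func() error {
            return fmt.Errorf("driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers")
        }
        fmt.Println(g.try("pvc-657094db", mount)) // fails and arms the 500ms gate
        fmt.Println(g.try("pvc-657094db", mount)) // rejected: inside the retry window
        time.Sleep(600 * time.Millisecond)
        fmt.Println(g.try("pvc-657094db", mount)) // window elapsed; runs (and fails) again
    }
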
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.027284 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vgz4z" Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.086213 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-g59rm" podStartSLOduration=128.086196513 podStartE2EDuration="2m8.086196513s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:08.814569361 +0000 UTC m=+147.155631975" watchObservedRunningTime="2025-12-05 15:01:09.086196513 +0000 UTC m=+147.427259127" Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.086529 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q9jjl"] Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.121997 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:09 crc kubenswrapper[4840]: E1205 15:01:09.122306 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:09.622289253 +0000 UTC m=+147.963351867 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.188984 4840 patch_prober.go:28] interesting pod/router-default-5444994796-qrgwq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 15:01:09 crc kubenswrapper[4840]: [-]has-synced failed: reason withheld Dec 05 15:01:09 crc kubenswrapper[4840]: [+]process-running ok Dec 05 15:01:09 crc kubenswrapper[4840]: healthz check failed Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.189033 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrgwq" podUID="cf41adb4-ca77-4997-a2db-0e45bbe317c3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.233163 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:09 crc kubenswrapper[4840]: E1205 15:01:09.233509 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:09.733498038 +0000 UTC m=+148.074560652 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.315155 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2z4bf"] Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.336265 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:09 crc kubenswrapper[4840]: E1205 15:01:09.336573 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:09.836557853 +0000 UTC m=+148.177620467 (durationBeforeRetry 500ms). 
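
The router startup-probe failures above use the aggregated healthz convention common to Kubernetes components: one "[+]name ok" or "[-]name failed: reason withheld" line per registered sub-check, a closing "healthz check failed" verdict, and an overall HTTP 500 when any sub-check fails; in these entries backend-http and has-synced fail while process-running passes. A self-contained sketch of that convention (a generic illustration with hypothetical sub-checks, not the router's actual handler):

    package main

    import (
        "fmt"
        "log"
        "net/http"
        "strings"
    )

    // check is one named sub-check; the handler aggregates results in the same
    // shape as the probe output in the log.
    type check struct {
        name string
        run  func() error
    }

    func healthz(checks []check) http.HandlerFunc {
        return func(w http.ResponseWriter, r *http.Request) {
            var body strings.Builder
            failed := false
            for _, c := range checks {
                if err := c.run(); err != nil {
                    failed = true
                    fmt.Fprintf(&body, "[-]%s failed: reason withheld\n", c.name)
                } else {
                    fmt.Fprintf(&body, "[+]%s ok\n", c.name)
                }
            }
            if failed {
                // Matches the probe's observed "statuscode: 500".
                w.WriteHeader(http.StatusInternalServerError)
                body.WriteString("healthz check failed\n")
            }
            fmt.Fprint(w, body.String())
        }
    }

    func main() {
        // Hypothetical stand-ins for the router's backend-http and
        // process-running sub-checks.
        http.Handle("/healthz", healthz([]check{
            {name: "backend-http", run: func() error { return fmt.Errorf("not ready") }},
            {name: "process-running", run: func() error { return nil }},
        }))
        log.Fatal(http.ListenAndServe(":8080", nil))
    }
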
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:09 crc kubenswrapper[4840]: W1205 15:01:09.422500 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda6138547_0c56_4951_bab0_283fe8649655.slice/crio-d762001bb50fef73518ab512bdc0b9b9eae44817a27bfb3db034933b38517ce5 WatchSource:0}: Error finding container d762001bb50fef73518ab512bdc0b9b9eae44817a27bfb3db034933b38517ce5: Status 404 returned error can't find the container with id d762001bb50fef73518ab512bdc0b9b9eae44817a27bfb3db034933b38517ce5 Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.437239 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:09 crc kubenswrapper[4840]: E1205 15:01:09.437589 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:09.93757771 +0000 UTC m=+148.278640314 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.541308 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:09 crc kubenswrapper[4840]: E1205 15:01:09.541722 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:10.041707105 +0000 UTC m=+148.382769719 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.611858 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q9jjl" event={"ID":"516530e0-a660-4755-8d26-b7c798a43428","Type":"ContainerStarted","Data":"529600e2fa37a104b52d3ec5d17acbc240a428f6ebc5935ca711afc2b28103d1"} Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.614179 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2z4bf" event={"ID":"a6138547-0c56-4951-bab0-283fe8649655","Type":"ContainerStarted","Data":"d762001bb50fef73518ab512bdc0b9b9eae44817a27bfb3db034933b38517ce5"} Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.647140 4840 generic.go:334] "Generic (PLEG): container finished" podID="70be4dce-113d-4659-b5dc-55c3c724de12" containerID="8e45db5dc85e7c50bcd98a93e6e42a6c3aa7b086f5e71f52eee6410338bae161" exitCode=0 Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.647199 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415780-k44mq" event={"ID":"70be4dce-113d-4659-b5dc-55c3c724de12","Type":"ContainerDied","Data":"8e45db5dc85e7c50bcd98a93e6e42a6c3aa7b086f5e71f52eee6410338bae161"} Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.647664 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:09 crc kubenswrapper[4840]: E1205 15:01:09.647945 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:10.147934289 +0000 UTC m=+148.488996903 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.657334 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-fnhqw" event={"ID":"e68d4dc9-2488-4666-897a-f2ce27934d81","Type":"ContainerStarted","Data":"9013b6f84c28fcf1a9cd52e8969a10e5171d52451137653ab027ef2e6663ca89"} Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.727366 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xc72x"] Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.749441 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:09 crc kubenswrapper[4840]: E1205 15:01:09.749534 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:10.249515492 +0000 UTC m=+148.590578106 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.750010 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:09 crc kubenswrapper[4840]: E1205 15:01:09.753501 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:10.253487634 +0000 UTC m=+148.594550248 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.823410 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.837736 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.837843 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.845350 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.845463 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.850949 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.851102 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4fae2445-a175-44cf-8873-684235999c95-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"4fae2445-a175-44cf-8873-684235999c95\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.851130 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4fae2445-a175-44cf-8873-684235999c95-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"4fae2445-a175-44cf-8873-684235999c95\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 15:01:09 crc kubenswrapper[4840]: E1205 15:01:09.851316 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:10.351296891 +0000 UTC m=+148.692359505 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.951760 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4fae2445-a175-44cf-8873-684235999c95-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"4fae2445-a175-44cf-8873-684235999c95\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.951800 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4fae2445-a175-44cf-8873-684235999c95-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"4fae2445-a175-44cf-8873-684235999c95\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.951897 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:09 crc kubenswrapper[4840]: E1205 15:01:09.952178 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:10.452166203 +0000 UTC m=+148.793228817 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.952327 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4fae2445-a175-44cf-8873-684235999c95-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"4fae2445-a175-44cf-8873-684235999c95\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 15:01:09 crc kubenswrapper[4840]: I1205 15:01:09.971149 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vgz4z"] Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.047185 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4fae2445-a175-44cf-8873-684235999c95-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"4fae2445-a175-44cf-8873-684235999c95\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.055957 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:10 crc kubenswrapper[4840]: E1205 15:01:10.056330 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:10.556311789 +0000 UTC m=+148.897374403 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.059029 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pczb2" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.146512 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-scqls"] Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.149698 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-scqls" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.162193 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc195b16-d39f-4786-a866-aab4d3377d52-catalog-content\") pod \"redhat-marketplace-scqls\" (UID: \"fc195b16-d39f-4786-a866-aab4d3377d52\") " pod="openshift-marketplace/redhat-marketplace-scqls" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.162218 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.162260 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc195b16-d39f-4786-a866-aab4d3377d52-utilities\") pod \"redhat-marketplace-scqls\" (UID: \"fc195b16-d39f-4786-a866-aab4d3377d52\") " pod="openshift-marketplace/redhat-marketplace-scqls" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.162298 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.162336 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.162358 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.162384 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lzst\" (UniqueName: \"kubernetes.io/projected/fc195b16-d39f-4786-a866-aab4d3377d52-kube-api-access-8lzst\") pod \"redhat-marketplace-scqls\" (UID: \"fc195b16-d39f-4786-a866-aab4d3377d52\") " pod="openshift-marketplace/redhat-marketplace-scqls" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.162407 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:01:10 crc kubenswrapper[4840]: E1205 15:01:10.163658 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-05 15:01:10.663644014 +0000 UTC m=+149.004706678 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.165259 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.168322 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.175639 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.190383 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.204073 4840 patch_prober.go:28] interesting pod/router-default-5444994796-qrgwq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 15:01:10 crc kubenswrapper[4840]: [-]has-synced failed: reason withheld Dec 05 15:01:10 crc kubenswrapper[4840]: [+]process-running ok Dec 05 15:01:10 crc kubenswrapper[4840]: healthz check failed Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.204121 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrgwq" podUID="cf41adb4-ca77-4997-a2db-0e45bbe317c3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.260330 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-scqls"] Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.263816 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.274493 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lzst\" (UniqueName: \"kubernetes.io/projected/fc195b16-d39f-4786-a866-aab4d3377d52-kube-api-access-8lzst\") pod \"redhat-marketplace-scqls\" (UID: \"fc195b16-d39f-4786-a866-aab4d3377d52\") " pod="openshift-marketplace/redhat-marketplace-scqls" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.274634 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc195b16-d39f-4786-a866-aab4d3377d52-catalog-content\") pod \"redhat-marketplace-scqls\" (UID: \"fc195b16-d39f-4786-a866-aab4d3377d52\") " pod="openshift-marketplace/redhat-marketplace-scqls" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.286191 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc195b16-d39f-4786-a866-aab4d3377d52-utilities\") pod \"redhat-marketplace-scqls\" (UID: \"fc195b16-d39f-4786-a866-aab4d3377d52\") " pod="openshift-marketplace/redhat-marketplace-scqls" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.286263 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.287739 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc195b16-d39f-4786-a866-aab4d3377d52-catalog-content\") pod \"redhat-marketplace-scqls\" (UID: \"fc195b16-d39f-4786-a866-aab4d3377d52\") " pod="openshift-marketplace/redhat-marketplace-scqls" Dec 05 15:01:10 crc kubenswrapper[4840]: E1205 15:01:10.288375 
4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:10.788360581 +0000 UTC m=+149.129423195 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.288749 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc195b16-d39f-4786-a866-aab4d3377d52-utilities\") pod \"redhat-marketplace-scqls\" (UID: \"fc195b16-d39f-4786-a866-aab4d3377d52\") " pod="openshift-marketplace/redhat-marketplace-scqls" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.297215 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.303929 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.323677 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.381798 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lzst\" (UniqueName: \"kubernetes.io/projected/fc195b16-d39f-4786-a866-aab4d3377d52-kube-api-access-8lzst\") pod \"redhat-marketplace-scqls\" (UID: \"fc195b16-d39f-4786-a866-aab4d3377d52\") " pod="openshift-marketplace/redhat-marketplace-scqls" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.403506 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7vcg4" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.403630 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:10 crc kubenswrapper[4840]: E1205 15:01:10.403957 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:10.90394543 +0000 UTC m=+149.245008044 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.455178 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7vcg4" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.484148 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-scqls" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.504462 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:10 crc kubenswrapper[4840]: E1205 15:01:10.505599 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:11.005583505 +0000 UTC m=+149.346646119 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.543250 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-zb8r7" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.543294 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-zb8r7" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.558494 4840 patch_prober.go:28] interesting pod/console-f9d7485db-zb8r7 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body= Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.558553 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-zb8r7" podUID="7c949308-6fa4-47cf-9275-b4ddcdcbb30a" containerName="console" probeResult="failure" output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.596815 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.597501 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.606616 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.607330 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv" Dec 05 15:01:10 crc kubenswrapper[4840]: E1205 15:01:10.608029 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:11.108013092 +0000 UTC m=+149.449075776 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.641788 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gwvqf"] Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.642744 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gwvqf" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.685941 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-nc87t" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.703796 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-fnhqw" event={"ID":"e68d4dc9-2488-4666-897a-f2ce27934d81","Type":"ContainerStarted","Data":"4ff5fe5d8a8693593e3a00f458c3ce70a63a477d185fafc28fef2016ab854cc4"} Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.707704 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.708177 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmsqh\" (UniqueName: \"kubernetes.io/projected/c6e0195b-10e8-465d-9e3d-548633d29ed7-kube-api-access-rmsqh\") pod \"redhat-marketplace-gwvqf\" (UID: \"c6e0195b-10e8-465d-9e3d-548633d29ed7\") " pod="openshift-marketplace/redhat-marketplace-gwvqf" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.708234 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6e0195b-10e8-465d-9e3d-548633d29ed7-catalog-content\") pod \"redhat-marketplace-gwvqf\" (UID: \"c6e0195b-10e8-465d-9e3d-548633d29ed7\") " pod="openshift-marketplace/redhat-marketplace-gwvqf" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.708317 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6e0195b-10e8-465d-9e3d-548633d29ed7-utilities\") pod \"redhat-marketplace-gwvqf\" (UID: \"c6e0195b-10e8-465d-9e3d-548633d29ed7\") " pod="openshift-marketplace/redhat-marketplace-gwvqf" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.707911 4840 generic.go:334] "Generic (PLEG): container finished" podID="516530e0-a660-4755-8d26-b7c798a43428" containerID="aa47b781797f481e47dda7b92394d6bae53a858c5bc956410477abaf003380bc" exitCode=0 Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.707930 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q9jjl" event={"ID":"516530e0-a660-4755-8d26-b7c798a43428","Type":"ContainerDied","Data":"aa47b781797f481e47dda7b92394d6bae53a858c5bc956410477abaf003380bc"} Dec 05 15:01:10 crc kubenswrapper[4840]: E1205 15:01:10.709149 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:11.209132001 +0000 UTC m=+149.550194615 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.716408 4840 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.727531 4840 generic.go:334] "Generic (PLEG): container finished" podID="a6138547-0c56-4951-bab0-283fe8649655" containerID="7f76fa8b9f344c1476d3c2bcc64093413948b2c5ce05342d5b0a82435a0ab5b9" exitCode=0 Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.727778 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2z4bf" event={"ID":"a6138547-0c56-4951-bab0-283fe8649655","Type":"ContainerDied","Data":"7f76fa8b9f344c1476d3c2bcc64093413948b2c5ce05342d5b0a82435a0ab5b9"} Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.728490 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-nc87t" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.760991 4840 patch_prober.go:28] interesting pod/downloads-7954f5f757-rbbsb container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.761054 4840 patch_prober.go:28] interesting pod/downloads-7954f5f757-rbbsb container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.761104 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rbbsb" podUID="ffde146e-eb04-4056-acb2-febc2da78e46" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.761048 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-rbbsb" podUID="ffde146e-eb04-4056-acb2-febc2da78e46" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.761271 4840 generic.go:334] "Generic (PLEG): container finished" podID="2b685425-9cba-4168-a2a6-a4a707989b01" containerID="b3a697caf600ee4e6c9f253f5968dfb9b3a01a0341f2db3690062996b72de81c" exitCode=0 Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.761526 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgz4z" event={"ID":"2b685425-9cba-4168-a2a6-a4a707989b01","Type":"ContainerDied","Data":"b3a697caf600ee4e6c9f253f5968dfb9b3a01a0341f2db3690062996b72de81c"} Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.761634 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgz4z" 
event={"ID":"2b685425-9cba-4168-a2a6-a4a707989b01","Type":"ContainerStarted","Data":"0c515eeef51c278fa58010986c6bad649794610bb07d53d2f5c35df56fcb65eb"} Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.766493 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gwvqf"] Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.784321 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.785087 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.792628 4840 generic.go:334] "Generic (PLEG): container finished" podID="d59d7b71-22f9-49c1-9415-f420122f72df" containerID="7b83c7ab857dfadc8187e49f0a8fce9f6bc0353c5c1318c6f998b94b29c94d07" exitCode=0 Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.793610 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xc72x" event={"ID":"d59d7b71-22f9-49c1-9415-f420122f72df","Type":"ContainerDied","Data":"7b83c7ab857dfadc8187e49f0a8fce9f6bc0353c5c1318c6f998b94b29c94d07"} Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.793635 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xc72x" event={"ID":"d59d7b71-22f9-49c1-9415-f420122f72df","Type":"ContainerStarted","Data":"5ded5dc137717b428e636ca72a0ea30fdf38a92f2c0e5fea69fc2802a6d44498"} Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.805950 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.811748 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmsqh\" (UniqueName: \"kubernetes.io/projected/c6e0195b-10e8-465d-9e3d-548633d29ed7-kube-api-access-rmsqh\") pod \"redhat-marketplace-gwvqf\" (UID: \"c6e0195b-10e8-465d-9e3d-548633d29ed7\") " pod="openshift-marketplace/redhat-marketplace-gwvqf" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.811800 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6e0195b-10e8-465d-9e3d-548633d29ed7-catalog-content\") pod \"redhat-marketplace-gwvqf\" (UID: \"c6e0195b-10e8-465d-9e3d-548633d29ed7\") " pod="openshift-marketplace/redhat-marketplace-gwvqf" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.811834 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.811903 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6e0195b-10e8-465d-9e3d-548633d29ed7-utilities\") pod \"redhat-marketplace-gwvqf\" (UID: \"c6e0195b-10e8-465d-9e3d-548633d29ed7\") " pod="openshift-marketplace/redhat-marketplace-gwvqf" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.813808 4840 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6e0195b-10e8-465d-9e3d-548633d29ed7-catalog-content\") pod \"redhat-marketplace-gwvqf\" (UID: \"c6e0195b-10e8-465d-9e3d-548633d29ed7\") " pod="openshift-marketplace/redhat-marketplace-gwvqf" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.813859 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6e0195b-10e8-465d-9e3d-548633d29ed7-utilities\") pod \"redhat-marketplace-gwvqf\" (UID: \"c6e0195b-10e8-465d-9e3d-548633d29ed7\") " pod="openshift-marketplace/redhat-marketplace-gwvqf" Dec 05 15:01:10 crc kubenswrapper[4840]: E1205 15:01:10.814150 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:11.314133861 +0000 UTC m=+149.655196545 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.878069 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmsqh\" (UniqueName: \"kubernetes.io/projected/c6e0195b-10e8-465d-9e3d-548633d29ed7-kube-api-access-rmsqh\") pod \"redhat-marketplace-gwvqf\" (UID: \"c6e0195b-10e8-465d-9e3d-548633d29ed7\") " pod="openshift-marketplace/redhat-marketplace-gwvqf" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.913914 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:10 crc kubenswrapper[4840]: E1205 15:01:10.913983 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:11.413966974 +0000 UTC m=+149.755029588 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.914457 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:10 crc kubenswrapper[4840]: E1205 15:01:10.914716 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:11.414709175 +0000 UTC m=+149.755771779 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.916818 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-mp7c5" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.933127 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-mp7c5" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.966043 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-g59rm" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.967145 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-g59rm" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.967752 4840 patch_prober.go:28] interesting pod/apiserver-76f77b778f-g59rm container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.7:8443/livez\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.967780 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-g59rm" podUID="d87a8800-e3bd-4e0b-89c4-fe73193110fb" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.7:8443/livez\": dial tcp 10.217.0.7:8443: connect: connection refused" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.967912 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hdjqt"] Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.968944 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hdjqt" Dec 05 15:01:10 crc kubenswrapper[4840]: I1205 15:01:10.985993 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gwvqf" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.002502 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.002886 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7bdp5" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.016952 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.017064 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tn7k5" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.017249 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8f9a961-ee6b-429f-b07e-12ee35a7c986-utilities\") pod \"redhat-operators-hdjqt\" (UID: \"a8f9a961-ee6b-429f-b07e-12ee35a7c986\") " pod="openshift-marketplace/redhat-operators-hdjqt" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.017299 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8f9a961-ee6b-429f-b07e-12ee35a7c986-catalog-content\") pod \"redhat-operators-hdjqt\" (UID: \"a8f9a961-ee6b-429f-b07e-12ee35a7c986\") " pod="openshift-marketplace/redhat-operators-hdjqt" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.017408 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpcnj\" (UniqueName: \"kubernetes.io/projected/a8f9a961-ee6b-429f-b07e-12ee35a7c986-kube-api-access-tpcnj\") pod \"redhat-operators-hdjqt\" (UID: \"a8f9a961-ee6b-429f-b07e-12ee35a7c986\") " pod="openshift-marketplace/redhat-operators-hdjqt" Dec 05 15:01:11 crc kubenswrapper[4840]: E1205 15:01:11.018189 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:11.518166141 +0000 UTC m=+149.859228805 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.019605 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hdjqt"] Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.053191 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tn7k5" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.060203 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-7bdp5" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.097410 4840 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.127685 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpcnj\" (UniqueName: \"kubernetes.io/projected/a8f9a961-ee6b-429f-b07e-12ee35a7c986-kube-api-access-tpcnj\") pod \"redhat-operators-hdjqt\" (UID: \"a8f9a961-ee6b-429f-b07e-12ee35a7c986\") " pod="openshift-marketplace/redhat-operators-hdjqt" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.127944 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.128055 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8f9a961-ee6b-429f-b07e-12ee35a7c986-utilities\") pod \"redhat-operators-hdjqt\" (UID: \"a8f9a961-ee6b-429f-b07e-12ee35a7c986\") " pod="openshift-marketplace/redhat-operators-hdjqt" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.128075 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8f9a961-ee6b-429f-b07e-12ee35a7c986-catalog-content\") pod \"redhat-operators-hdjqt\" (UID: \"a8f9a961-ee6b-429f-b07e-12ee35a7c986\") " pod="openshift-marketplace/redhat-operators-hdjqt" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.128458 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8f9a961-ee6b-429f-b07e-12ee35a7c986-catalog-content\") pod \"redhat-operators-hdjqt\" (UID: \"a8f9a961-ee6b-429f-b07e-12ee35a7c986\") " pod="openshift-marketplace/redhat-operators-hdjqt" Dec 05 15:01:11 crc kubenswrapper[4840]: E1205 15:01:11.129013 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-05 15:01:11.629000035 +0000 UTC m=+149.970062709 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.129221 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8f9a961-ee6b-429f-b07e-12ee35a7c986-utilities\") pod \"redhat-operators-hdjqt\" (UID: \"a8f9a961-ee6b-429f-b07e-12ee35a7c986\") " pod="openshift-marketplace/redhat-operators-hdjqt" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.139062 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-sm6c5"] Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.140208 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sm6c5" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.196518 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sm6c5"] Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.196585 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-qrgwq" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.218905 4840 patch_prober.go:28] interesting pod/router-default-5444994796-qrgwq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 15:01:11 crc kubenswrapper[4840]: [-]has-synced failed: reason withheld Dec 05 15:01:11 crc kubenswrapper[4840]: [+]process-running ok Dec 05 15:01:11 crc kubenswrapper[4840]: healthz check failed Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.218959 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrgwq" podUID="cf41adb4-ca77-4997-a2db-0e45bbe317c3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.231280 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.231503 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7d16875-422c-4a41-8fd2-498bb020ab9a-catalog-content\") pod \"redhat-operators-sm6c5\" (UID: \"a7d16875-422c-4a41-8fd2-498bb020ab9a\") " pod="openshift-marketplace/redhat-operators-sm6c5" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.231557 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfzj8\" (UniqueName: 
\"kubernetes.io/projected/a7d16875-422c-4a41-8fd2-498bb020ab9a-kube-api-access-cfzj8\") pod \"redhat-operators-sm6c5\" (UID: \"a7d16875-422c-4a41-8fd2-498bb020ab9a\") " pod="openshift-marketplace/redhat-operators-sm6c5" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.231576 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7d16875-422c-4a41-8fd2-498bb020ab9a-utilities\") pod \"redhat-operators-sm6c5\" (UID: \"a7d16875-422c-4a41-8fd2-498bb020ab9a\") " pod="openshift-marketplace/redhat-operators-sm6c5" Dec 05 15:01:11 crc kubenswrapper[4840]: E1205 15:01:11.231681 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:11.731667698 +0000 UTC m=+150.072730312 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.310013 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpcnj\" (UniqueName: \"kubernetes.io/projected/a8f9a961-ee6b-429f-b07e-12ee35a7c986-kube-api-access-tpcnj\") pod \"redhat-operators-hdjqt\" (UID: \"a8f9a961-ee6b-429f-b07e-12ee35a7c986\") " pod="openshift-marketplace/redhat-operators-hdjqt" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.340373 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7d16875-422c-4a41-8fd2-498bb020ab9a-catalog-content\") pod \"redhat-operators-sm6c5\" (UID: \"a7d16875-422c-4a41-8fd2-498bb020ab9a\") " pod="openshift-marketplace/redhat-operators-sm6c5" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.340468 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.340509 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfzj8\" (UniqueName: \"kubernetes.io/projected/a7d16875-422c-4a41-8fd2-498bb020ab9a-kube-api-access-cfzj8\") pod \"redhat-operators-sm6c5\" (UID: \"a7d16875-422c-4a41-8fd2-498bb020ab9a\") " pod="openshift-marketplace/redhat-operators-sm6c5" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.340541 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7d16875-422c-4a41-8fd2-498bb020ab9a-utilities\") pod \"redhat-operators-sm6c5\" (UID: \"a7d16875-422c-4a41-8fd2-498bb020ab9a\") " pod="openshift-marketplace/redhat-operators-sm6c5" Dec 05 15:01:11 crc kubenswrapper[4840]: E1205 15:01:11.341235 4840 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:11.841218277 +0000 UTC m=+150.182280891 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.342213 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7d16875-422c-4a41-8fd2-498bb020ab9a-utilities\") pod \"redhat-operators-sm6c5\" (UID: \"a7d16875-422c-4a41-8fd2-498bb020ab9a\") " pod="openshift-marketplace/redhat-operators-sm6c5" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.343506 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7d16875-422c-4a41-8fd2-498bb020ab9a-catalog-content\") pod \"redhat-operators-sm6c5\" (UID: \"a7d16875-422c-4a41-8fd2-498bb020ab9a\") " pod="openshift-marketplace/redhat-operators-sm6c5" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.343824 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hdjqt" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.404635 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.408350 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfzj8\" (UniqueName: \"kubernetes.io/projected/a7d16875-422c-4a41-8fd2-498bb020ab9a-kube-api-access-cfzj8\") pod \"redhat-operators-sm6c5\" (UID: \"a7d16875-422c-4a41-8fd2-498bb020ab9a\") " pod="openshift-marketplace/redhat-operators-sm6c5" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.442819 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:11 crc kubenswrapper[4840]: E1205 15:01:11.443774 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:11.943757287 +0000 UTC m=+150.284819901 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.498565 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sm6c5" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.547620 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:11 crc kubenswrapper[4840]: E1205 15:01:11.547980 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:12.047966724 +0000 UTC m=+150.389029338 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.652578 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:11 crc kubenswrapper[4840]: E1205 15:01:11.653076 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 15:01:12.153055006 +0000 UTC m=+150.494117620 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.751614 4840 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-05T15:01:11.097445273Z","Handler":null,"Name":""} Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.758532 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:11 crc kubenswrapper[4840]: E1205 15:01:11.758895 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 15:01:12.258882329 +0000 UTC m=+150.599944943 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xsrwv" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.816662 4840 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.816861 4840 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.853084 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"903ace3ca76de85e83c8053307f119d114a152c27fb92edbeb20bcc41ee85d09"} Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.860475 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.861015 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.861627 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.877775 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.886710 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"4fae2445-a175-44cf-8873-684235999c95","Type":"ContainerStarted","Data":"8ed6b644b15b058589df8e8bac8b70e32c0b1875d24dc8c5bf214dee02a72e52"} Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.886950 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.887167 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.905798 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.953572 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-fnhqw" event={"ID":"e68d4dc9-2488-4666-897a-f2ce27934d81","Type":"ContainerStarted","Data":"54442605bd26581fcef544fb8890d5f3c2b02f4f0796b97cc485190673b61b3f"} Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.961568 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4nr2" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.962201 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/daa13445-a3cd-499b-9cee-0a10057f274a-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"daa13445-a3cd-499b-9cee-0a10057f274a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.962247 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/daa13445-a3cd-499b-9cee-0a10057f274a-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"daa13445-a3cd-499b-9cee-0a10057f274a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 15:01:11 crc kubenswrapper[4840]: I1205 15:01:11.962292 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.066675 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/daa13445-a3cd-499b-9cee-0a10057f274a-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: 
\"daa13445-a3cd-499b-9cee-0a10057f274a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.067564 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/daa13445-a3cd-499b-9cee-0a10057f274a-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"daa13445-a3cd-499b-9cee-0a10057f274a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.067684 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/daa13445-a3cd-499b-9cee-0a10057f274a-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"daa13445-a3cd-499b-9cee-0a10057f274a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.068608 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-fnhqw" podStartSLOduration=15.068594808 podStartE2EDuration="15.068594808s" podCreationTimestamp="2025-12-05 15:00:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:12.036342796 +0000 UTC m=+150.377405410" watchObservedRunningTime="2025-12-05 15:01:12.068594808 +0000 UTC m=+150.409657422" Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.125884 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/daa13445-a3cd-499b-9cee-0a10057f274a-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"daa13445-a3cd-499b-9cee-0a10057f274a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.131955 4840 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.131987 4840 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.173633 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.187917 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415780-k44mq" Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.193112 4840 patch_prober.go:28] interesting pod/router-default-5444994796-qrgwq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 15:01:12 crc kubenswrapper[4840]: [-]has-synced failed: reason withheld Dec 05 15:01:12 crc kubenswrapper[4840]: [+]process-running ok Dec 05 15:01:12 crc kubenswrapper[4840]: healthz check failed Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.193159 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrgwq" podUID="cf41adb4-ca77-4997-a2db-0e45bbe317c3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.244080 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.255592 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xsrwv\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.274051 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w6xgx\" (UniqueName: \"kubernetes.io/projected/70be4dce-113d-4659-b5dc-55c3c724de12-kube-api-access-w6xgx\") pod \"70be4dce-113d-4659-b5dc-55c3c724de12\" (UID: \"70be4dce-113d-4659-b5dc-55c3c724de12\") " Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.274164 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/70be4dce-113d-4659-b5dc-55c3c724de12-config-volume\") pod \"70be4dce-113d-4659-b5dc-55c3c724de12\" (UID: \"70be4dce-113d-4659-b5dc-55c3c724de12\") " Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.274239 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/70be4dce-113d-4659-b5dc-55c3c724de12-secret-volume\") pod \"70be4dce-113d-4659-b5dc-55c3c724de12\" (UID: \"70be4dce-113d-4659-b5dc-55c3c724de12\") " Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.277642 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-scqls"] Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.277971 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70be4dce-113d-4659-b5dc-55c3c724de12-config-volume" (OuterVolumeSpecName: "config-volume") pod "70be4dce-113d-4659-b5dc-55c3c724de12" (UID: "70be4dce-113d-4659-b5dc-55c3c724de12"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.287079 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70be4dce-113d-4659-b5dc-55c3c724de12-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "70be4dce-113d-4659-b5dc-55c3c724de12" (UID: "70be4dce-113d-4659-b5dc-55c3c724de12"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.287730 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70be4dce-113d-4659-b5dc-55c3c724de12-kube-api-access-w6xgx" (OuterVolumeSpecName: "kube-api-access-w6xgx") pod "70be4dce-113d-4659-b5dc-55c3c724de12" (UID: "70be4dce-113d-4659-b5dc-55c3c724de12"). InnerVolumeSpecName "kube-api-access-w6xgx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:01:12 crc kubenswrapper[4840]: W1205 15:01:12.331766 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfc195b16_d39f_4786_a866_aab4d3377d52.slice/crio-1ff0c16917400c645491dc60b5266862d7f10652f69e561d67de1080fa459b99 WatchSource:0}: Error finding container 1ff0c16917400c645491dc60b5266862d7f10652f69e561d67de1080fa459b99: Status 404 returned error can't find the container with id 1ff0c16917400c645491dc60b5266862d7f10652f69e561d67de1080fa459b99 Dec 05 15:01:12 crc kubenswrapper[4840]: W1205 15:01:12.359143 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-4681d207e30d6d112b24843383914ded16f815bb6ce895dad66efc86227d3485 WatchSource:0}: Error finding container 4681d207e30d6d112b24843383914ded16f815bb6ce895dad66efc86227d3485: Status 404 returned error can't find the container with id 4681d207e30d6d112b24843383914ded16f815bb6ce895dad66efc86227d3485 Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.390096 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w6xgx\" (UniqueName: \"kubernetes.io/projected/70be4dce-113d-4659-b5dc-55c3c724de12-kube-api-access-w6xgx\") on node \"crc\" DevicePath \"\"" Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.390121 4840 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/70be4dce-113d-4659-b5dc-55c3c724de12-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.390134 4840 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/70be4dce-113d-4659-b5dc-55c3c724de12-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.420411 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gwvqf"] Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.478364 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sm6c5"] Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.551332 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.598211 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hdjqt"] Dec 05 15:01:12 crc kubenswrapper[4840]: W1205 15:01:12.672025 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda8f9a961_ee6b_429f_b07e_12ee35a7c986.slice/crio-ec0433b010919d2f78398e9e3f824f93f7f9af676c78381bc5a472a2208dca86 WatchSource:0}: Error finding container ec0433b010919d2f78398e9e3f824f93f7f9af676c78381bc5a472a2208dca86: Status 404 returned error can't find the container with id ec0433b010919d2f78398e9e3f824f93f7f9af676c78381bc5a472a2208dca86 Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.850024 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.976302 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xsrwv"] Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.984133 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scqls" event={"ID":"fc195b16-d39f-4786-a866-aab4d3377d52","Type":"ContainerStarted","Data":"5dc3451197173ee17564fb9974b4a24c574d5d1c1a03310250de6ed13d4ed92e"} Dec 05 15:01:12 crc kubenswrapper[4840]: I1205 15:01:12.984186 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scqls" event={"ID":"fc195b16-d39f-4786-a866-aab4d3377d52","Type":"ContainerStarted","Data":"1ff0c16917400c645491dc60b5266862d7f10652f69e561d67de1080fa459b99"} Dec 05 15:01:13 crc kubenswrapper[4840]: I1205 15:01:13.023278 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415780-k44mq" event={"ID":"70be4dce-113d-4659-b5dc-55c3c724de12","Type":"ContainerDied","Data":"896e6270a2a65b9735791a0fc1d2edf53a78a2f285f07fcda6835f461cfb3631"} Dec 05 15:01:13 crc kubenswrapper[4840]: I1205 15:01:13.023318 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="896e6270a2a65b9735791a0fc1d2edf53a78a2f285f07fcda6835f461cfb3631" Dec 05 15:01:13 crc kubenswrapper[4840]: I1205 15:01:13.023469 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415780-k44mq" Dec 05 15:01:13 crc kubenswrapper[4840]: I1205 15:01:13.052397 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"2626f240c7b06f557bcd685ab5d72c96f3f0694f21cd019993d7ef27309a0da6"} Dec 05 15:01:13 crc kubenswrapper[4840]: I1205 15:01:13.073974 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"6ec98ef2c73be7d7feab3f0b69b3f84f8cf5c90ffe5028c9c7d073f64987b2fa"} Dec 05 15:01:13 crc kubenswrapper[4840]: I1205 15:01:13.074015 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"03c98a5688ead36f16f5237cd5b7c20b6b65a17a0d0c1d3549844da3cc87d00c"} Dec 05 15:01:13 crc kubenswrapper[4840]: I1205 15:01:13.074537 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 15:01:13 crc kubenswrapper[4840]: I1205 15:01:13.083756 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"4fae2445-a175-44cf-8873-684235999c95","Type":"ContainerStarted","Data":"6f55234ac70feba8559b06c1fef8565fc1d08296ceafc9a269997fa241d12383"} Dec 05 15:01:13 crc kubenswrapper[4840]: I1205 15:01:13.115159 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hdjqt" event={"ID":"a8f9a961-ee6b-429f-b07e-12ee35a7c986","Type":"ContainerStarted","Data":"ec0433b010919d2f78398e9e3f824f93f7f9af676c78381bc5a472a2208dca86"} Dec 05 15:01:13 crc kubenswrapper[4840]: I1205 15:01:13.121603 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"4681d207e30d6d112b24843383914ded16f815bb6ce895dad66efc86227d3485"} Dec 05 15:01:13 crc kubenswrapper[4840]: I1205 15:01:13.123895 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"daa13445-a3cd-499b-9cee-0a10057f274a","Type":"ContainerStarted","Data":"586341d67a6b255ad0132fee2ef9e49bfc25afa768b95c0d242bbfe973c36f7a"} Dec 05 15:01:13 crc kubenswrapper[4840]: I1205 15:01:13.125264 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sm6c5" event={"ID":"a7d16875-422c-4a41-8fd2-498bb020ab9a","Type":"ContainerStarted","Data":"6097bb62d3c707cbd4806c7ff7d94a0899a3fbc0820afbb18b5ae18d9c3836f2"} Dec 05 15:01:13 crc kubenswrapper[4840]: I1205 15:01:13.125283 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sm6c5" event={"ID":"a7d16875-422c-4a41-8fd2-498bb020ab9a","Type":"ContainerStarted","Data":"d8c4d8b9f96e530db6e65b3bb31bb78867e725f7a062d3bb6736ba30f7ee4dd4"} Dec 05 15:01:13 crc kubenswrapper[4840]: I1205 15:01:13.138236 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gwvqf" 
event={"ID":"c6e0195b-10e8-465d-9e3d-548633d29ed7","Type":"ContainerStarted","Data":"85e87d404ca3c0ef85fd4e6d315997ef97028c32d3ca908c1b6aa2e31d31f3c4"} Dec 05 15:01:13 crc kubenswrapper[4840]: E1205 15:01:13.142500 4840 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfc195b16_d39f_4786_a866_aab4d3377d52.slice/crio-5dc3451197173ee17564fb9974b4a24c574d5d1c1a03310250de6ed13d4ed92e.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda7d16875_422c_4a41_8fd2_498bb020ab9a.slice/crio-6097bb62d3c707cbd4806c7ff7d94a0899a3fbc0820afbb18b5ae18d9c3836f2.scope\": RecentStats: unable to find data in memory cache]" Dec 05 15:01:13 crc kubenswrapper[4840]: I1205 15:01:13.160677 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=4.160637373 podStartE2EDuration="4.160637373s" podCreationTimestamp="2025-12-05 15:01:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:13.138012683 +0000 UTC m=+151.479075297" watchObservedRunningTime="2025-12-05 15:01:13.160637373 +0000 UTC m=+151.501699987" Dec 05 15:01:13 crc kubenswrapper[4840]: I1205 15:01:13.194279 4840 patch_prober.go:28] interesting pod/router-default-5444994796-qrgwq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 15:01:13 crc kubenswrapper[4840]: [-]has-synced failed: reason withheld Dec 05 15:01:13 crc kubenswrapper[4840]: [+]process-running ok Dec 05 15:01:13 crc kubenswrapper[4840]: healthz check failed Dec 05 15:01:13 crc kubenswrapper[4840]: I1205 15:01:13.194335 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrgwq" podUID="cf41adb4-ca77-4997-a2db-0e45bbe317c3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 15:01:14 crc kubenswrapper[4840]: I1205 15:01:14.200811 4840 patch_prober.go:28] interesting pod/router-default-5444994796-qrgwq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 15:01:14 crc kubenswrapper[4840]: [-]has-synced failed: reason withheld Dec 05 15:01:14 crc kubenswrapper[4840]: [+]process-running ok Dec 05 15:01:14 crc kubenswrapper[4840]: healthz check failed Dec 05 15:01:14 crc kubenswrapper[4840]: I1205 15:01:14.201197 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrgwq" podUID="cf41adb4-ca77-4997-a2db-0e45bbe317c3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 15:01:14 crc kubenswrapper[4840]: I1205 15:01:14.209810 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" event={"ID":"71224ef1-9751-49f0-89d6-18b5225f97cb","Type":"ContainerStarted","Data":"b228830e1db6932884a46b590f60d89e53e11dc44b41dfc8d7d80c9a70c40c0f"} Dec 05 15:01:14 crc kubenswrapper[4840]: I1205 15:01:14.209856 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" event={"ID":"71224ef1-9751-49f0-89d6-18b5225f97cb","Type":"ContainerStarted","Data":"43d6bc68f8980ff493e58e822c4b1079bf9718b91cb6a17f0210692a4ed87837"} Dec 05 15:01:14 crc kubenswrapper[4840]: I1205 15:01:14.229748 4840 generic.go:334] "Generic (PLEG): container finished" podID="a7d16875-422c-4a41-8fd2-498bb020ab9a" containerID="6097bb62d3c707cbd4806c7ff7d94a0899a3fbc0820afbb18b5ae18d9c3836f2" exitCode=0 Dec 05 15:01:14 crc kubenswrapper[4840]: I1205 15:01:14.229800 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sm6c5" event={"ID":"a7d16875-422c-4a41-8fd2-498bb020ab9a","Type":"ContainerDied","Data":"6097bb62d3c707cbd4806c7ff7d94a0899a3fbc0820afbb18b5ae18d9c3836f2"} Dec 05 15:01:14 crc kubenswrapper[4840]: I1205 15:01:14.232151 4840 generic.go:334] "Generic (PLEG): container finished" podID="4fae2445-a175-44cf-8873-684235999c95" containerID="6f55234ac70feba8559b06c1fef8565fc1d08296ceafc9a269997fa241d12383" exitCode=0 Dec 05 15:01:14 crc kubenswrapper[4840]: I1205 15:01:14.232195 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"4fae2445-a175-44cf-8873-684235999c95","Type":"ContainerDied","Data":"6f55234ac70feba8559b06c1fef8565fc1d08296ceafc9a269997fa241d12383"} Dec 05 15:01:14 crc kubenswrapper[4840]: I1205 15:01:14.235836 4840 generic.go:334] "Generic (PLEG): container finished" podID="c6e0195b-10e8-465d-9e3d-548633d29ed7" containerID="b03d80f08fbdb37660fe078530c5a404593a4038b8f5b67efd68867091938f74" exitCode=0 Dec 05 15:01:14 crc kubenswrapper[4840]: I1205 15:01:14.235913 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gwvqf" event={"ID":"c6e0195b-10e8-465d-9e3d-548633d29ed7","Type":"ContainerDied","Data":"b03d80f08fbdb37660fe078530c5a404593a4038b8f5b67efd68867091938f74"} Dec 05 15:01:14 crc kubenswrapper[4840]: I1205 15:01:14.240187 4840 generic.go:334] "Generic (PLEG): container finished" podID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" containerID="0d632a8ed526e130e99d7788db3be246ee246e2ab1c2b594e95f4798b37d448b" exitCode=0 Dec 05 15:01:14 crc kubenswrapper[4840]: I1205 15:01:14.240268 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hdjqt" event={"ID":"a8f9a961-ee6b-429f-b07e-12ee35a7c986","Type":"ContainerDied","Data":"0d632a8ed526e130e99d7788db3be246ee246e2ab1c2b594e95f4798b37d448b"} Dec 05 15:01:14 crc kubenswrapper[4840]: I1205 15:01:14.247692 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"651ef320ca30aa1e69f348497b5901bb4ef9e6d07911df25d14944d6a3874a7a"} Dec 05 15:01:14 crc kubenswrapper[4840]: I1205 15:01:14.249569 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"daa13445-a3cd-499b-9cee-0a10057f274a","Type":"ContainerStarted","Data":"73c5e966fb92d82bb489b5bae9cef555c4311f8faf73ef9236fab5f04c5f39d1"} Dec 05 15:01:14 crc kubenswrapper[4840]: I1205 15:01:14.254457 4840 generic.go:334] "Generic (PLEG): container finished" podID="fc195b16-d39f-4786-a866-aab4d3377d52" containerID="5dc3451197173ee17564fb9974b4a24c574d5d1c1a03310250de6ed13d4ed92e" exitCode=0 Dec 05 15:01:14 crc kubenswrapper[4840]: I1205 15:01:14.255311 4840 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/redhat-marketplace-scqls" event={"ID":"fc195b16-d39f-4786-a866-aab4d3377d52","Type":"ContainerDied","Data":"5dc3451197173ee17564fb9974b4a24c574d5d1c1a03310250de6ed13d4ed92e"} Dec 05 15:01:15 crc kubenswrapper[4840]: I1205 15:01:15.186084 4840 patch_prober.go:28] interesting pod/router-default-5444994796-qrgwq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 15:01:15 crc kubenswrapper[4840]: [-]has-synced failed: reason withheld Dec 05 15:01:15 crc kubenswrapper[4840]: [+]process-running ok Dec 05 15:01:15 crc kubenswrapper[4840]: healthz check failed Dec 05 15:01:15 crc kubenswrapper[4840]: I1205 15:01:15.186530 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrgwq" podUID="cf41adb4-ca77-4997-a2db-0e45bbe317c3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 15:01:15 crc kubenswrapper[4840]: I1205 15:01:15.303457 4840 generic.go:334] "Generic (PLEG): container finished" podID="daa13445-a3cd-499b-9cee-0a10057f274a" containerID="73c5e966fb92d82bb489b5bae9cef555c4311f8faf73ef9236fab5f04c5f39d1" exitCode=0 Dec 05 15:01:15 crc kubenswrapper[4840]: I1205 15:01:15.303577 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"daa13445-a3cd-499b-9cee-0a10057f274a","Type":"ContainerDied","Data":"73c5e966fb92d82bb489b5bae9cef555c4311f8faf73ef9236fab5f04c5f39d1"} Dec 05 15:01:15 crc kubenswrapper[4840]: I1205 15:01:15.303774 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:15 crc kubenswrapper[4840]: I1205 15:01:15.340510 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" podStartSLOduration=134.340491241 podStartE2EDuration="2m14.340491241s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:01:15.33092422 +0000 UTC m=+153.671986844" watchObservedRunningTime="2025-12-05 15:01:15.340491241 +0000 UTC m=+153.681553855" Dec 05 15:01:16 crc kubenswrapper[4840]: I1205 15:01:16.029674 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 15:01:16 crc kubenswrapper[4840]: I1205 15:01:16.044345 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-g59rm" Dec 05 15:01:16 crc kubenswrapper[4840]: I1205 15:01:16.056586 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-g59rm" Dec 05 15:01:16 crc kubenswrapper[4840]: I1205 15:01:16.188987 4840 patch_prober.go:28] interesting pod/router-default-5444994796-qrgwq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 15:01:16 crc kubenswrapper[4840]: [-]has-synced failed: reason withheld Dec 05 15:01:16 crc kubenswrapper[4840]: [+]process-running ok Dec 05 15:01:16 crc kubenswrapper[4840]: healthz check failed Dec 05 15:01:16 crc kubenswrapper[4840]: I1205 15:01:16.189059 
4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrgwq" podUID="cf41adb4-ca77-4997-a2db-0e45bbe317c3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 15:01:16 crc kubenswrapper[4840]: I1205 15:01:16.268145 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-5t5n2" Dec 05 15:01:16 crc kubenswrapper[4840]: I1205 15:01:16.344688 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"4fae2445-a175-44cf-8873-684235999c95","Type":"ContainerDied","Data":"8ed6b644b15b058589df8e8bac8b70e32c0b1875d24dc8c5bf214dee02a72e52"} Dec 05 15:01:16 crc kubenswrapper[4840]: I1205 15:01:16.344733 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ed6b644b15b058589df8e8bac8b70e32c0b1875d24dc8c5bf214dee02a72e52" Dec 05 15:01:16 crc kubenswrapper[4840]: I1205 15:01:16.378773 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 15:01:16 crc kubenswrapper[4840]: I1205 15:01:16.475631 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4fae2445-a175-44cf-8873-684235999c95-kubelet-dir\") pod \"4fae2445-a175-44cf-8873-684235999c95\" (UID: \"4fae2445-a175-44cf-8873-684235999c95\") " Dec 05 15:01:16 crc kubenswrapper[4840]: I1205 15:01:16.475744 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4fae2445-a175-44cf-8873-684235999c95-kube-api-access\") pod \"4fae2445-a175-44cf-8873-684235999c95\" (UID: \"4fae2445-a175-44cf-8873-684235999c95\") " Dec 05 15:01:16 crc kubenswrapper[4840]: I1205 15:01:16.475757 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4fae2445-a175-44cf-8873-684235999c95-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "4fae2445-a175-44cf-8873-684235999c95" (UID: "4fae2445-a175-44cf-8873-684235999c95"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:01:16 crc kubenswrapper[4840]: I1205 15:01:16.476144 4840 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4fae2445-a175-44cf-8873-684235999c95-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 15:01:16 crc kubenswrapper[4840]: I1205 15:01:16.502310 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fae2445-a175-44cf-8873-684235999c95-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "4fae2445-a175-44cf-8873-684235999c95" (UID: "4fae2445-a175-44cf-8873-684235999c95"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:01:16 crc kubenswrapper[4840]: I1205 15:01:16.577235 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4fae2445-a175-44cf-8873-684235999c95-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 15:01:16 crc kubenswrapper[4840]: I1205 15:01:16.865773 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 15:01:16 crc kubenswrapper[4840]: I1205 15:01:16.987006 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/daa13445-a3cd-499b-9cee-0a10057f274a-kubelet-dir\") pod \"daa13445-a3cd-499b-9cee-0a10057f274a\" (UID: \"daa13445-a3cd-499b-9cee-0a10057f274a\") " Dec 05 15:01:16 crc kubenswrapper[4840]: I1205 15:01:16.987963 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/daa13445-a3cd-499b-9cee-0a10057f274a-kube-api-access\") pod \"daa13445-a3cd-499b-9cee-0a10057f274a\" (UID: \"daa13445-a3cd-499b-9cee-0a10057f274a\") " Dec 05 15:01:16 crc kubenswrapper[4840]: I1205 15:01:16.987223 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/daa13445-a3cd-499b-9cee-0a10057f274a-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "daa13445-a3cd-499b-9cee-0a10057f274a" (UID: "daa13445-a3cd-499b-9cee-0a10057f274a"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:01:16 crc kubenswrapper[4840]: I1205 15:01:16.997823 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/daa13445-a3cd-499b-9cee-0a10057f274a-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "daa13445-a3cd-499b-9cee-0a10057f274a" (UID: "daa13445-a3cd-499b-9cee-0a10057f274a"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:01:17 crc kubenswrapper[4840]: I1205 15:01:17.089204 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/daa13445-a3cd-499b-9cee-0a10057f274a-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 15:01:17 crc kubenswrapper[4840]: I1205 15:01:17.089242 4840 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/daa13445-a3cd-499b-9cee-0a10057f274a-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 15:01:17 crc kubenswrapper[4840]: I1205 15:01:17.186563 4840 patch_prober.go:28] interesting pod/router-default-5444994796-qrgwq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 15:01:17 crc kubenswrapper[4840]: [-]has-synced failed: reason withheld Dec 05 15:01:17 crc kubenswrapper[4840]: [+]process-running ok Dec 05 15:01:17 crc kubenswrapper[4840]: healthz check failed Dec 05 15:01:17 crc kubenswrapper[4840]: I1205 15:01:17.186615 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrgwq" podUID="cf41adb4-ca77-4997-a2db-0e45bbe317c3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 15:01:17 crc kubenswrapper[4840]: I1205 15:01:17.401113 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 15:01:17 crc kubenswrapper[4840]: I1205 15:01:17.402177 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"daa13445-a3cd-499b-9cee-0a10057f274a","Type":"ContainerDied","Data":"586341d67a6b255ad0132fee2ef9e49bfc25afa768b95c0d242bbfe973c36f7a"} Dec 05 15:01:17 crc kubenswrapper[4840]: I1205 15:01:17.402247 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 15:01:17 crc kubenswrapper[4840]: I1205 15:01:17.402217 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="586341d67a6b255ad0132fee2ef9e49bfc25afa768b95c0d242bbfe973c36f7a" Dec 05 15:01:18 crc kubenswrapper[4840]: I1205 15:01:18.185507 4840 patch_prober.go:28] interesting pod/router-default-5444994796-qrgwq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 15:01:18 crc kubenswrapper[4840]: [-]has-synced failed: reason withheld Dec 05 15:01:18 crc kubenswrapper[4840]: [+]process-running ok Dec 05 15:01:18 crc kubenswrapper[4840]: healthz check failed Dec 05 15:01:18 crc kubenswrapper[4840]: I1205 15:01:18.185947 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrgwq" podUID="cf41adb4-ca77-4997-a2db-0e45bbe317c3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 15:01:19 crc kubenswrapper[4840]: I1205 15:01:19.192097 4840 patch_prober.go:28] interesting pod/router-default-5444994796-qrgwq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 15:01:19 crc kubenswrapper[4840]: [-]has-synced failed: reason withheld Dec 05 15:01:19 crc kubenswrapper[4840]: [+]process-running ok Dec 05 15:01:19 crc kubenswrapper[4840]: healthz check failed Dec 05 15:01:19 crc kubenswrapper[4840]: I1205 15:01:19.192434 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrgwq" podUID="cf41adb4-ca77-4997-a2db-0e45bbe317c3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 15:01:19 crc kubenswrapper[4840]: I1205 15:01:19.472072 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:01:19 crc kubenswrapper[4840]: I1205 15:01:19.472156 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:01:20 crc kubenswrapper[4840]: I1205 15:01:20.186411 4840 patch_prober.go:28] interesting pod/router-default-5444994796-qrgwq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 
15:01:20 crc kubenswrapper[4840]: [-]has-synced failed: reason withheld Dec 05 15:01:20 crc kubenswrapper[4840]: [+]process-running ok Dec 05 15:01:20 crc kubenswrapper[4840]: healthz check failed Dec 05 15:01:20 crc kubenswrapper[4840]: I1205 15:01:20.186474 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrgwq" podUID="cf41adb4-ca77-4997-a2db-0e45bbe317c3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 15:01:20 crc kubenswrapper[4840]: I1205 15:01:20.542343 4840 patch_prober.go:28] interesting pod/console-f9d7485db-zb8r7 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body= Dec 05 15:01:20 crc kubenswrapper[4840]: I1205 15:01:20.542400 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-zb8r7" podUID="7c949308-6fa4-47cf-9275-b4ddcdcbb30a" containerName="console" probeResult="failure" output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused" Dec 05 15:01:20 crc kubenswrapper[4840]: I1205 15:01:20.759412 4840 patch_prober.go:28] interesting pod/downloads-7954f5f757-rbbsb container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 05 15:01:20 crc kubenswrapper[4840]: I1205 15:01:20.759468 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-rbbsb" podUID="ffde146e-eb04-4056-acb2-febc2da78e46" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 05 15:01:20 crc kubenswrapper[4840]: I1205 15:01:20.759846 4840 patch_prober.go:28] interesting pod/downloads-7954f5f757-rbbsb container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 05 15:01:20 crc kubenswrapper[4840]: I1205 15:01:20.759888 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rbbsb" podUID="ffde146e-eb04-4056-acb2-febc2da78e46" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 05 15:01:21 crc kubenswrapper[4840]: I1205 15:01:21.200387 4840 patch_prober.go:28] interesting pod/router-default-5444994796-qrgwq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 15:01:21 crc kubenswrapper[4840]: [-]has-synced failed: reason withheld Dec 05 15:01:21 crc kubenswrapper[4840]: [+]process-running ok Dec 05 15:01:21 crc kubenswrapper[4840]: healthz check failed Dec 05 15:01:21 crc kubenswrapper[4840]: I1205 15:01:21.200441 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrgwq" podUID="cf41adb4-ca77-4997-a2db-0e45bbe317c3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 15:01:22 crc kubenswrapper[4840]: I1205 15:01:22.199494 4840 patch_prober.go:28] interesting 
pod/router-default-5444994796-qrgwq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 15:01:22 crc kubenswrapper[4840]: [-]has-synced failed: reason withheld Dec 05 15:01:22 crc kubenswrapper[4840]: [+]process-running ok Dec 05 15:01:22 crc kubenswrapper[4840]: healthz check failed Dec 05 15:01:22 crc kubenswrapper[4840]: I1205 15:01:22.199548 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrgwq" podUID="cf41adb4-ca77-4997-a2db-0e45bbe317c3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 15:01:23 crc kubenswrapper[4840]: I1205 15:01:23.207274 4840 patch_prober.go:28] interesting pod/router-default-5444994796-qrgwq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 15:01:23 crc kubenswrapper[4840]: [-]has-synced failed: reason withheld Dec 05 15:01:23 crc kubenswrapper[4840]: [+]process-running ok Dec 05 15:01:23 crc kubenswrapper[4840]: healthz check failed Dec 05 15:01:23 crc kubenswrapper[4840]: I1205 15:01:23.207463 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qrgwq" podUID="cf41adb4-ca77-4997-a2db-0e45bbe317c3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 15:01:23 crc kubenswrapper[4840]: I1205 15:01:23.617393 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs\") pod \"network-metrics-daemon-gn7qq\" (UID: \"f5cf5212-af00-4788-ad5f-ff824fea7c0f\") " pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 15:01:23 crc kubenswrapper[4840]: I1205 15:01:23.631983 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f5cf5212-af00-4788-ad5f-ff824fea7c0f-metrics-certs\") pod \"network-metrics-daemon-gn7qq\" (UID: \"f5cf5212-af00-4788-ad5f-ff824fea7c0f\") " pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 15:01:23 crc kubenswrapper[4840]: I1205 15:01:23.691651 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-gn7qq" Dec 05 15:01:24 crc kubenswrapper[4840]: I1205 15:01:24.190108 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-qrgwq" Dec 05 15:01:24 crc kubenswrapper[4840]: I1205 15:01:24.193986 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-qrgwq" Dec 05 15:01:30 crc kubenswrapper[4840]: I1205 15:01:30.583032 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-zb8r7" Dec 05 15:01:30 crc kubenswrapper[4840]: I1205 15:01:30.590260 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-zb8r7" Dec 05 15:01:30 crc kubenswrapper[4840]: I1205 15:01:30.766182 4840 patch_prober.go:28] interesting pod/downloads-7954f5f757-rbbsb container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 05 15:01:30 crc kubenswrapper[4840]: I1205 15:01:30.766237 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rbbsb" podUID="ffde146e-eb04-4056-acb2-febc2da78e46" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 05 15:01:30 crc kubenswrapper[4840]: I1205 15:01:30.766336 4840 patch_prober.go:28] interesting pod/downloads-7954f5f757-rbbsb container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 05 15:01:30 crc kubenswrapper[4840]: I1205 15:01:30.766387 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-rbbsb" podUID="ffde146e-eb04-4056-acb2-febc2da78e46" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 05 15:01:30 crc kubenswrapper[4840]: I1205 15:01:30.766433 4840 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-rbbsb" Dec 05 15:01:30 crc kubenswrapper[4840]: I1205 15:01:30.767824 4840 patch_prober.go:28] interesting pod/downloads-7954f5f757-rbbsb container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 05 15:01:30 crc kubenswrapper[4840]: I1205 15:01:30.767893 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rbbsb" podUID="ffde146e-eb04-4056-acb2-febc2da78e46" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 05 15:01:30 crc kubenswrapper[4840]: I1205 15:01:30.768393 4840 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"11a956238879e41d32114ff1b846a4d5c3816eb63ff7b9210765cfe75d1223c2"} pod="openshift-console/downloads-7954f5f757-rbbsb" containerMessage="Container download-server failed liveness probe, will be restarted" Dec 05 15:01:30 crc 
kubenswrapper[4840]: I1205 15:01:30.768655 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-rbbsb" podUID="ffde146e-eb04-4056-acb2-febc2da78e46" containerName="download-server" containerID="cri-o://11a956238879e41d32114ff1b846a4d5c3816eb63ff7b9210765cfe75d1223c2" gracePeriod=2 Dec 05 15:01:32 crc kubenswrapper[4840]: I1205 15:01:32.555090 4840 generic.go:334] "Generic (PLEG): container finished" podID="ffde146e-eb04-4056-acb2-febc2da78e46" containerID="11a956238879e41d32114ff1b846a4d5c3816eb63ff7b9210765cfe75d1223c2" exitCode=0 Dec 05 15:01:32 crc kubenswrapper[4840]: I1205 15:01:32.555169 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-rbbsb" event={"ID":"ffde146e-eb04-4056-acb2-febc2da78e46","Type":"ContainerDied","Data":"11a956238879e41d32114ff1b846a4d5c3816eb63ff7b9210765cfe75d1223c2"} Dec 05 15:01:32 crc kubenswrapper[4840]: I1205 15:01:32.557537 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:01:40 crc kubenswrapper[4840]: I1205 15:01:40.758717 4840 patch_prober.go:28] interesting pod/downloads-7954f5f757-rbbsb container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 05 15:01:40 crc kubenswrapper[4840]: I1205 15:01:40.759167 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rbbsb" podUID="ffde146e-eb04-4056-acb2-febc2da78e46" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 05 15:01:40 crc kubenswrapper[4840]: I1205 15:01:40.959909 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-8vzfs" Dec 05 15:01:47 crc kubenswrapper[4840]: I1205 15:01:47.451471 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 15:01:47 crc kubenswrapper[4840]: E1205 15:01:47.452045 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70be4dce-113d-4659-b5dc-55c3c724de12" containerName="collect-profiles" Dec 05 15:01:47 crc kubenswrapper[4840]: I1205 15:01:47.452064 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="70be4dce-113d-4659-b5dc-55c3c724de12" containerName="collect-profiles" Dec 05 15:01:47 crc kubenswrapper[4840]: E1205 15:01:47.452074 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fae2445-a175-44cf-8873-684235999c95" containerName="pruner" Dec 05 15:01:47 crc kubenswrapper[4840]: I1205 15:01:47.452083 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fae2445-a175-44cf-8873-684235999c95" containerName="pruner" Dec 05 15:01:47 crc kubenswrapper[4840]: E1205 15:01:47.452102 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daa13445-a3cd-499b-9cee-0a10057f274a" containerName="pruner" Dec 05 15:01:47 crc kubenswrapper[4840]: I1205 15:01:47.452112 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="daa13445-a3cd-499b-9cee-0a10057f274a" containerName="pruner" Dec 05 15:01:47 crc kubenswrapper[4840]: I1205 15:01:47.452215 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="70be4dce-113d-4659-b5dc-55c3c724de12" 
containerName="collect-profiles" Dec 05 15:01:47 crc kubenswrapper[4840]: I1205 15:01:47.452229 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fae2445-a175-44cf-8873-684235999c95" containerName="pruner" Dec 05 15:01:47 crc kubenswrapper[4840]: I1205 15:01:47.452243 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="daa13445-a3cd-499b-9cee-0a10057f274a" containerName="pruner" Dec 05 15:01:47 crc kubenswrapper[4840]: I1205 15:01:47.452691 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 15:01:47 crc kubenswrapper[4840]: I1205 15:01:47.455754 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 15:01:47 crc kubenswrapper[4840]: I1205 15:01:47.456182 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 05 15:01:47 crc kubenswrapper[4840]: I1205 15:01:47.456375 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 05 15:01:47 crc kubenswrapper[4840]: I1205 15:01:47.533557 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc946ddf-2ad5-4d6c-b1bc-44ad7734f358-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"bc946ddf-2ad5-4d6c-b1bc-44ad7734f358\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 15:01:47 crc kubenswrapper[4840]: I1205 15:01:47.533674 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bc946ddf-2ad5-4d6c-b1bc-44ad7734f358-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"bc946ddf-2ad5-4d6c-b1bc-44ad7734f358\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 15:01:47 crc kubenswrapper[4840]: I1205 15:01:47.635387 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc946ddf-2ad5-4d6c-b1bc-44ad7734f358-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"bc946ddf-2ad5-4d6c-b1bc-44ad7734f358\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 15:01:47 crc kubenswrapper[4840]: I1205 15:01:47.635491 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bc946ddf-2ad5-4d6c-b1bc-44ad7734f358-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"bc946ddf-2ad5-4d6c-b1bc-44ad7734f358\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 15:01:47 crc kubenswrapper[4840]: I1205 15:01:47.635594 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bc946ddf-2ad5-4d6c-b1bc-44ad7734f358-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"bc946ddf-2ad5-4d6c-b1bc-44ad7734f358\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 15:01:47 crc kubenswrapper[4840]: I1205 15:01:47.656598 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc946ddf-2ad5-4d6c-b1bc-44ad7734f358-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"bc946ddf-2ad5-4d6c-b1bc-44ad7734f358\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 15:01:47 crc kubenswrapper[4840]: I1205 
15:01:47.775919 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 15:01:49 crc kubenswrapper[4840]: I1205 15:01:49.472249 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:01:49 crc kubenswrapper[4840]: I1205 15:01:49.472303 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:01:50 crc kubenswrapper[4840]: I1205 15:01:50.759602 4840 patch_prober.go:28] interesting pod/downloads-7954f5f757-rbbsb container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 05 15:01:50 crc kubenswrapper[4840]: I1205 15:01:50.759680 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rbbsb" podUID="ffde146e-eb04-4056-acb2-febc2da78e46" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 05 15:01:51 crc kubenswrapper[4840]: I1205 15:01:51.124675 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 15:01:52 crc kubenswrapper[4840]: I1205 15:01:52.651847 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 15:01:52 crc kubenswrapper[4840]: I1205 15:01:52.653297 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 15:01:52 crc kubenswrapper[4840]: I1205 15:01:52.661410 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 15:01:52 crc kubenswrapper[4840]: I1205 15:01:52.803170 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/fb5b7dc0-6b27-4994-9eb7-db65746b4f97-var-lock\") pod \"installer-9-crc\" (UID: \"fb5b7dc0-6b27-4994-9eb7-db65746b4f97\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 15:01:52 crc kubenswrapper[4840]: I1205 15:01:52.803228 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fb5b7dc0-6b27-4994-9eb7-db65746b4f97-kube-api-access\") pod \"installer-9-crc\" (UID: \"fb5b7dc0-6b27-4994-9eb7-db65746b4f97\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 15:01:52 crc kubenswrapper[4840]: I1205 15:01:52.803261 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fb5b7dc0-6b27-4994-9eb7-db65746b4f97-kubelet-dir\") pod \"installer-9-crc\" (UID: \"fb5b7dc0-6b27-4994-9eb7-db65746b4f97\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 15:01:52 crc kubenswrapper[4840]: I1205 15:01:52.904252 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/fb5b7dc0-6b27-4994-9eb7-db65746b4f97-var-lock\") pod \"installer-9-crc\" (UID: \"fb5b7dc0-6b27-4994-9eb7-db65746b4f97\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 15:01:52 crc kubenswrapper[4840]: I1205 15:01:52.904329 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fb5b7dc0-6b27-4994-9eb7-db65746b4f97-kube-api-access\") pod \"installer-9-crc\" (UID: \"fb5b7dc0-6b27-4994-9eb7-db65746b4f97\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 15:01:52 crc kubenswrapper[4840]: I1205 15:01:52.904370 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fb5b7dc0-6b27-4994-9eb7-db65746b4f97-kubelet-dir\") pod \"installer-9-crc\" (UID: \"fb5b7dc0-6b27-4994-9eb7-db65746b4f97\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 15:01:52 crc kubenswrapper[4840]: I1205 15:01:52.904432 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/fb5b7dc0-6b27-4994-9eb7-db65746b4f97-var-lock\") pod \"installer-9-crc\" (UID: \"fb5b7dc0-6b27-4994-9eb7-db65746b4f97\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 15:01:52 crc kubenswrapper[4840]: I1205 15:01:52.904448 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fb5b7dc0-6b27-4994-9eb7-db65746b4f97-kubelet-dir\") pod \"installer-9-crc\" (UID: \"fb5b7dc0-6b27-4994-9eb7-db65746b4f97\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 15:01:52 crc kubenswrapper[4840]: I1205 15:01:52.925243 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fb5b7dc0-6b27-4994-9eb7-db65746b4f97-kube-api-access\") pod \"installer-9-crc\" (UID: 
\"fb5b7dc0-6b27-4994-9eb7-db65746b4f97\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 15:01:52 crc kubenswrapper[4840]: I1205 15:01:52.979969 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 15:02:00 crc kubenswrapper[4840]: I1205 15:02:00.815181 4840 patch_prober.go:28] interesting pod/downloads-7954f5f757-rbbsb container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 05 15:02:00 crc kubenswrapper[4840]: I1205 15:02:00.815644 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rbbsb" podUID="ffde146e-eb04-4056-acb2-febc2da78e46" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 05 15:02:02 crc kubenswrapper[4840]: E1205 15:02:02.205328 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 05 15:02:02 crc kubenswrapper[4840]: E1205 15:02:02.205647 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lsp47,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-2z4bf_openshift-marketplace(a6138547-0c56-4951-bab0-283fe8649655): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 15:02:02 crc kubenswrapper[4840]: E1205 15:02:02.206814 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" 
pod="openshift-marketplace/community-operators-2z4bf" podUID="a6138547-0c56-4951-bab0-283fe8649655" Dec 05 15:02:02 crc kubenswrapper[4840]: E1205 15:02:02.209082 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 05 15:02:02 crc kubenswrapper[4840]: E1205 15:02:02.209228 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7brzn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-xc72x_openshift-marketplace(d59d7b71-22f9-49c1-9415-f420122f72df): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 15:02:02 crc kubenswrapper[4840]: E1205 15:02:02.210435 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-xc72x" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" Dec 05 15:02:03 crc kubenswrapper[4840]: E1205 15:02:03.475013 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-2z4bf" podUID="a6138547-0c56-4951-bab0-283fe8649655" Dec 05 15:02:03 crc kubenswrapper[4840]: E1205 15:02:03.475063 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-xc72x" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" Dec 05 15:02:03 crc kubenswrapper[4840]: 
E1205 15:02:03.572598 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 05 15:02:03 crc kubenswrapper[4840]: E1205 15:02:03.572762 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8lzst,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-scqls_openshift-marketplace(fc195b16-d39f-4786-a866-aab4d3377d52): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 15:02:03 crc kubenswrapper[4840]: E1205 15:02:03.574062 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-scqls" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" Dec 05 15:02:03 crc kubenswrapper[4840]: E1205 15:02:03.582582 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 05 15:02:03 crc kubenswrapper[4840]: E1205 15:02:03.582689 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rmsqh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-gwvqf_openshift-marketplace(c6e0195b-10e8-465d-9e3d-548633d29ed7): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 15:02:03 crc kubenswrapper[4840]: E1205 15:02:03.583837 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-gwvqf" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" Dec 05 15:02:07 crc kubenswrapper[4840]: E1205 15:02:07.534253 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-gwvqf" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" Dec 05 15:02:07 crc kubenswrapper[4840]: E1205 15:02:07.534317 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-scqls" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" Dec 05 15:02:07 crc kubenswrapper[4840]: E1205 15:02:07.649465 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 05 15:02:07 crc kubenswrapper[4840]: E1205 15:02:07.649647 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tpcnj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-hdjqt_openshift-marketplace(a8f9a961-ee6b-429f-b07e-12ee35a7c986): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 15:02:07 crc kubenswrapper[4840]: E1205 15:02:07.651594 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-hdjqt" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" Dec 05 15:02:07 crc kubenswrapper[4840]: E1205 15:02:07.657329 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 05 15:02:07 crc kubenswrapper[4840]: E1205 15:02:07.657488 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cfzj8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-sm6c5_openshift-marketplace(a7d16875-422c-4a41-8fd2-498bb020ab9a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 15:02:07 crc kubenswrapper[4840]: E1205 15:02:07.658656 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-sm6c5" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" Dec 05 15:02:09 crc kubenswrapper[4840]: E1205 15:02:09.238446 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-sm6c5" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" Dec 05 15:02:09 crc kubenswrapper[4840]: E1205 15:02:09.238719 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-hdjqt" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" Dec 05 15:02:09 crc kubenswrapper[4840]: E1205 15:02:09.322708 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 05 15:02:09 crc kubenswrapper[4840]: E1205 15:02:09.324636 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tw7nd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-q9jjl_openshift-marketplace(516530e0-a660-4755-8d26-b7c798a43428): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 15:02:09 crc kubenswrapper[4840]: E1205 15:02:09.326375 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-q9jjl" podUID="516530e0-a660-4755-8d26-b7c798a43428" Dec 05 15:02:09 crc kubenswrapper[4840]: E1205 15:02:09.336756 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 05 15:02:09 crc kubenswrapper[4840]: E1205 15:02:09.337016 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-c72mj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-vgz4z_openshift-marketplace(2b685425-9cba-4168-a2a6-a4a707989b01): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 15:02:09 crc kubenswrapper[4840]: E1205 15:02:09.338164 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-vgz4z" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" Dec 05 15:02:09 crc kubenswrapper[4840]: I1205 15:02:09.545840 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 15:02:09 crc kubenswrapper[4840]: W1205 15:02:09.555115 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podfb5b7dc0_6b27_4994_9eb7_db65746b4f97.slice/crio-475426501a4dba2c133ad1fe31a6275052ad90b457673ad697e300e68cc8e2f1 WatchSource:0}: Error finding container 475426501a4dba2c133ad1fe31a6275052ad90b457673ad697e300e68cc8e2f1: Status 404 returned error can't find the container with id 475426501a4dba2c133ad1fe31a6275052ad90b457673ad697e300e68cc8e2f1 Dec 05 15:02:09 crc kubenswrapper[4840]: I1205 15:02:09.657362 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 15:02:09 crc kubenswrapper[4840]: I1205 15:02:09.662007 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-gn7qq"] Dec 05 15:02:09 crc kubenswrapper[4840]: W1205 15:02:09.667779 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podbc946ddf_2ad5_4d6c_b1bc_44ad7734f358.slice/crio-21824e674bbe9d50c7fb2d287840ed2a8db5c2e2063369a80891ee796718b3c5 WatchSource:0}: Error finding container 21824e674bbe9d50c7fb2d287840ed2a8db5c2e2063369a80891ee796718b3c5: Status 404 returned error can't find the container with id 21824e674bbe9d50c7fb2d287840ed2a8db5c2e2063369a80891ee796718b3c5 Dec 05 15:02:09 crc kubenswrapper[4840]: W1205 15:02:09.677474 
4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf5cf5212_af00_4788_ad5f_ff824fea7c0f.slice/crio-88149df100f4d3ea8ea5723ea0904c3b8685a445181a327f34827d33d6a5b3fb WatchSource:0}: Error finding container 88149df100f4d3ea8ea5723ea0904c3b8685a445181a327f34827d33d6a5b3fb: Status 404 returned error can't find the container with id 88149df100f4d3ea8ea5723ea0904c3b8685a445181a327f34827d33d6a5b3fb Dec 05 15:02:09 crc kubenswrapper[4840]: I1205 15:02:09.840267 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"fb5b7dc0-6b27-4994-9eb7-db65746b4f97","Type":"ContainerStarted","Data":"475426501a4dba2c133ad1fe31a6275052ad90b457673ad697e300e68cc8e2f1"} Dec 05 15:02:09 crc kubenswrapper[4840]: I1205 15:02:09.841826 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"bc946ddf-2ad5-4d6c-b1bc-44ad7734f358","Type":"ContainerStarted","Data":"21824e674bbe9d50c7fb2d287840ed2a8db5c2e2063369a80891ee796718b3c5"} Dec 05 15:02:09 crc kubenswrapper[4840]: I1205 15:02:09.846234 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-rbbsb" event={"ID":"ffde146e-eb04-4056-acb2-febc2da78e46","Type":"ContainerStarted","Data":"07443d221377813b486ba26ab8b2fa8dab13c9e457da1fa4c61cedbcccfe779c"} Dec 05 15:02:09 crc kubenswrapper[4840]: I1205 15:02:09.846858 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-rbbsb" Dec 05 15:02:09 crc kubenswrapper[4840]: I1205 15:02:09.846985 4840 patch_prober.go:28] interesting pod/downloads-7954f5f757-rbbsb container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 05 15:02:09 crc kubenswrapper[4840]: I1205 15:02:09.847024 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rbbsb" podUID="ffde146e-eb04-4056-acb2-febc2da78e46" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 05 15:02:09 crc kubenswrapper[4840]: I1205 15:02:09.851123 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-gn7qq" event={"ID":"f5cf5212-af00-4788-ad5f-ff824fea7c0f","Type":"ContainerStarted","Data":"88149df100f4d3ea8ea5723ea0904c3b8685a445181a327f34827d33d6a5b3fb"} Dec 05 15:02:09 crc kubenswrapper[4840]: E1205 15:02:09.853654 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-q9jjl" podUID="516530e0-a660-4755-8d26-b7c798a43428" Dec 05 15:02:09 crc kubenswrapper[4840]: E1205 15:02:09.859484 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-vgz4z" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" Dec 05 15:02:10 crc kubenswrapper[4840]: I1205 15:02:10.759091 4840 patch_prober.go:28] interesting 
pod/downloads-7954f5f757-rbbsb container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 05 15:02:10 crc kubenswrapper[4840]: I1205 15:02:10.759180 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-rbbsb" podUID="ffde146e-eb04-4056-acb2-febc2da78e46" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 05 15:02:10 crc kubenswrapper[4840]: I1205 15:02:10.759102 4840 patch_prober.go:28] interesting pod/downloads-7954f5f757-rbbsb container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 05 15:02:10 crc kubenswrapper[4840]: I1205 15:02:10.759237 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rbbsb" podUID="ffde146e-eb04-4056-acb2-febc2da78e46" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 05 15:02:10 crc kubenswrapper[4840]: I1205 15:02:10.859315 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-gn7qq" event={"ID":"f5cf5212-af00-4788-ad5f-ff824fea7c0f","Type":"ContainerStarted","Data":"b135dbad7973236e5026b48604ffe4d264d87b5d7b85f157d1035be1d54db434"} Dec 05 15:02:10 crc kubenswrapper[4840]: I1205 15:02:10.859365 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-gn7qq" event={"ID":"f5cf5212-af00-4788-ad5f-ff824fea7c0f","Type":"ContainerStarted","Data":"1d261228eb43aabaae93af88f610f507344398c90865623e681aa78d9628ea89"} Dec 05 15:02:10 crc kubenswrapper[4840]: I1205 15:02:10.860907 4840 generic.go:334] "Generic (PLEG): container finished" podID="bc946ddf-2ad5-4d6c-b1bc-44ad7734f358" containerID="cf58bed179563c8431e19df8bcacaf82dc90909a2f03aa492f8e8ed6905fd30b" exitCode=0 Dec 05 15:02:10 crc kubenswrapper[4840]: I1205 15:02:10.860996 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"bc946ddf-2ad5-4d6c-b1bc-44ad7734f358","Type":"ContainerDied","Data":"cf58bed179563c8431e19df8bcacaf82dc90909a2f03aa492f8e8ed6905fd30b"} Dec 05 15:02:10 crc kubenswrapper[4840]: I1205 15:02:10.862209 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"fb5b7dc0-6b27-4994-9eb7-db65746b4f97","Type":"ContainerStarted","Data":"6d60942719955176c7462572ef39e6c6499878c3a6f6530dad17764d373fadc7"} Dec 05 15:02:10 crc kubenswrapper[4840]: I1205 15:02:10.862715 4840 patch_prober.go:28] interesting pod/downloads-7954f5f757-rbbsb container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 05 15:02:10 crc kubenswrapper[4840]: I1205 15:02:10.862758 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rbbsb" podUID="ffde146e-eb04-4056-acb2-febc2da78e46" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 05 15:02:10 crc 
kubenswrapper[4840]: I1205 15:02:10.901684 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-gn7qq" podStartSLOduration=189.901666096 podStartE2EDuration="3m9.901666096s" podCreationTimestamp="2025-12-05 14:59:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:02:10.880333279 +0000 UTC m=+209.221395913" watchObservedRunningTime="2025-12-05 15:02:10.901666096 +0000 UTC m=+209.242728720" Dec 05 15:02:10 crc kubenswrapper[4840]: I1205 15:02:10.915206 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=18.915188712 podStartE2EDuration="18.915188712s" podCreationTimestamp="2025-12-05 15:01:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:02:10.900483394 +0000 UTC m=+209.241546008" watchObservedRunningTime="2025-12-05 15:02:10.915188712 +0000 UTC m=+209.256251326" Dec 05 15:02:12 crc kubenswrapper[4840]: I1205 15:02:12.117054 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 15:02:12 crc kubenswrapper[4840]: I1205 15:02:12.187089 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bc946ddf-2ad5-4d6c-b1bc-44ad7734f358-kubelet-dir\") pod \"bc946ddf-2ad5-4d6c-b1bc-44ad7734f358\" (UID: \"bc946ddf-2ad5-4d6c-b1bc-44ad7734f358\") " Dec 05 15:02:12 crc kubenswrapper[4840]: I1205 15:02:12.187217 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc946ddf-2ad5-4d6c-b1bc-44ad7734f358-kube-api-access\") pod \"bc946ddf-2ad5-4d6c-b1bc-44ad7734f358\" (UID: \"bc946ddf-2ad5-4d6c-b1bc-44ad7734f358\") " Dec 05 15:02:12 crc kubenswrapper[4840]: I1205 15:02:12.187224 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bc946ddf-2ad5-4d6c-b1bc-44ad7734f358-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "bc946ddf-2ad5-4d6c-b1bc-44ad7734f358" (UID: "bc946ddf-2ad5-4d6c-b1bc-44ad7734f358"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:02:12 crc kubenswrapper[4840]: I1205 15:02:12.187630 4840 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bc946ddf-2ad5-4d6c-b1bc-44ad7734f358-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 15:02:12 crc kubenswrapper[4840]: I1205 15:02:12.193071 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc946ddf-2ad5-4d6c-b1bc-44ad7734f358-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "bc946ddf-2ad5-4d6c-b1bc-44ad7734f358" (UID: "bc946ddf-2ad5-4d6c-b1bc-44ad7734f358"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:02:12 crc kubenswrapper[4840]: I1205 15:02:12.288562 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc946ddf-2ad5-4d6c-b1bc-44ad7734f358-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 15:02:12 crc kubenswrapper[4840]: I1205 15:02:12.873976 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"bc946ddf-2ad5-4d6c-b1bc-44ad7734f358","Type":"ContainerDied","Data":"21824e674bbe9d50c7fb2d287840ed2a8db5c2e2063369a80891ee796718b3c5"} Dec 05 15:02:12 crc kubenswrapper[4840]: I1205 15:02:12.874300 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="21824e674bbe9d50c7fb2d287840ed2a8db5c2e2063369a80891ee796718b3c5" Dec 05 15:02:12 crc kubenswrapper[4840]: I1205 15:02:12.874026 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 15:02:19 crc kubenswrapper[4840]: I1205 15:02:19.471401 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:02:19 crc kubenswrapper[4840]: I1205 15:02:19.472022 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:02:19 crc kubenswrapper[4840]: I1205 15:02:19.472079 4840 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" Dec 05 15:02:19 crc kubenswrapper[4840]: I1205 15:02:19.472681 4840 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705"} pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 15:02:19 crc kubenswrapper[4840]: I1205 15:02:19.472752 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" containerID="cri-o://e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705" gracePeriod=600 Dec 05 15:02:19 crc kubenswrapper[4840]: I1205 15:02:19.911721 4840 generic.go:334] "Generic (PLEG): container finished" podID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerID="e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705" exitCode=0 Dec 05 15:02:19 crc kubenswrapper[4840]: I1205 15:02:19.911766 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerDied","Data":"e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705"} Dec 05 15:02:20 crc kubenswrapper[4840]: I1205 15:02:20.758688 4840 patch_prober.go:28] interesting 
pod/downloads-7954f5f757-rbbsb container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 05 15:02:20 crc kubenswrapper[4840]: I1205 15:02:20.759067 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-rbbsb" podUID="ffde146e-eb04-4056-acb2-febc2da78e46" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 05 15:02:20 crc kubenswrapper[4840]: I1205 15:02:20.758849 4840 patch_prober.go:28] interesting pod/downloads-7954f5f757-rbbsb container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 05 15:02:20 crc kubenswrapper[4840]: I1205 15:02:20.759187 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-rbbsb" podUID="ffde146e-eb04-4056-acb2-febc2da78e46" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 05 15:02:22 crc kubenswrapper[4840]: I1205 15:02:22.929508 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerStarted","Data":"94a41c63939146ef314f1a8ae64ad21aa7f707bfd188ab51b86cffd43bc910f9"} Dec 05 15:02:25 crc kubenswrapper[4840]: I1205 15:02:25.958001 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sm6c5" event={"ID":"a7d16875-422c-4a41-8fd2-498bb020ab9a","Type":"ContainerStarted","Data":"1821b226c0af7030ca6460e01717830ed14b1871e7a49df0819841306361734d"} Dec 05 15:02:25 crc kubenswrapper[4840]: I1205 15:02:25.963363 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xc72x" event={"ID":"d59d7b71-22f9-49c1-9415-f420122f72df","Type":"ContainerStarted","Data":"48357c6d8160591afb9a0999d83e703ff8c5dba67e5b97d19e02f19de5c8b47d"} Dec 05 15:02:25 crc kubenswrapper[4840]: I1205 15:02:25.965058 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gwvqf" event={"ID":"c6e0195b-10e8-465d-9e3d-548633d29ed7","Type":"ContainerStarted","Data":"b0c0669d2c6635b2bd16604753789a64509fab8f18d6cc55be33536e6c3bbc06"} Dec 05 15:02:25 crc kubenswrapper[4840]: I1205 15:02:25.966894 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q9jjl" event={"ID":"516530e0-a660-4755-8d26-b7c798a43428","Type":"ContainerStarted","Data":"11c1fe4488dc96a54b799ee9cf10f2f10bea90527daf8b9b86489230379b8c87"} Dec 05 15:02:25 crc kubenswrapper[4840]: I1205 15:02:25.970684 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2z4bf" event={"ID":"a6138547-0c56-4951-bab0-283fe8649655","Type":"ContainerStarted","Data":"e0371959a85dd3e6a9a50fe7fe0b663bc8be2f7c92ddfaef448a505da843ab10"} Dec 05 15:02:25 crc kubenswrapper[4840]: I1205 15:02:25.979324 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scqls" 
event={"ID":"fc195b16-d39f-4786-a866-aab4d3377d52","Type":"ContainerStarted","Data":"123c670a24708e644cf50a2c6434f66f9d2bdc73f0a25cfb9096daee408cc348"} Dec 05 15:02:26 crc kubenswrapper[4840]: I1205 15:02:26.986742 4840 generic.go:334] "Generic (PLEG): container finished" podID="c6e0195b-10e8-465d-9e3d-548633d29ed7" containerID="b0c0669d2c6635b2bd16604753789a64509fab8f18d6cc55be33536e6c3bbc06" exitCode=0 Dec 05 15:02:26 crc kubenswrapper[4840]: I1205 15:02:26.986824 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gwvqf" event={"ID":"c6e0195b-10e8-465d-9e3d-548633d29ed7","Type":"ContainerDied","Data":"b0c0669d2c6635b2bd16604753789a64509fab8f18d6cc55be33536e6c3bbc06"} Dec 05 15:02:26 crc kubenswrapper[4840]: I1205 15:02:26.992046 4840 generic.go:334] "Generic (PLEG): container finished" podID="fc195b16-d39f-4786-a866-aab4d3377d52" containerID="123c670a24708e644cf50a2c6434f66f9d2bdc73f0a25cfb9096daee408cc348" exitCode=0 Dec 05 15:02:26 crc kubenswrapper[4840]: I1205 15:02:26.992081 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scqls" event={"ID":"fc195b16-d39f-4786-a866-aab4d3377d52","Type":"ContainerDied","Data":"123c670a24708e644cf50a2c6434f66f9d2bdc73f0a25cfb9096daee408cc348"} Dec 05 15:02:28 crc kubenswrapper[4840]: I1205 15:02:28.000726 4840 generic.go:334] "Generic (PLEG): container finished" podID="a6138547-0c56-4951-bab0-283fe8649655" containerID="e0371959a85dd3e6a9a50fe7fe0b663bc8be2f7c92ddfaef448a505da843ab10" exitCode=0 Dec 05 15:02:28 crc kubenswrapper[4840]: I1205 15:02:28.000812 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2z4bf" event={"ID":"a6138547-0c56-4951-bab0-283fe8649655","Type":"ContainerDied","Data":"e0371959a85dd3e6a9a50fe7fe0b663bc8be2f7c92ddfaef448a505da843ab10"} Dec 05 15:02:28 crc kubenswrapper[4840]: I1205 15:02:28.006416 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgz4z" event={"ID":"2b685425-9cba-4168-a2a6-a4a707989b01","Type":"ContainerStarted","Data":"031f04740dcae33e9a8ba79c0b30fd8d079566591b5eb4837c7d497ae2b073c1"} Dec 05 15:02:28 crc kubenswrapper[4840]: I1205 15:02:28.016273 4840 generic.go:334] "Generic (PLEG): container finished" podID="d59d7b71-22f9-49c1-9415-f420122f72df" containerID="48357c6d8160591afb9a0999d83e703ff8c5dba67e5b97d19e02f19de5c8b47d" exitCode=0 Dec 05 15:02:28 crc kubenswrapper[4840]: I1205 15:02:28.016377 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xc72x" event={"ID":"d59d7b71-22f9-49c1-9415-f420122f72df","Type":"ContainerDied","Data":"48357c6d8160591afb9a0999d83e703ff8c5dba67e5b97d19e02f19de5c8b47d"} Dec 05 15:02:28 crc kubenswrapper[4840]: I1205 15:02:28.022844 4840 generic.go:334] "Generic (PLEG): container finished" podID="516530e0-a660-4755-8d26-b7c798a43428" containerID="11c1fe4488dc96a54b799ee9cf10f2f10bea90527daf8b9b86489230379b8c87" exitCode=0 Dec 05 15:02:28 crc kubenswrapper[4840]: I1205 15:02:28.022924 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q9jjl" event={"ID":"516530e0-a660-4755-8d26-b7c798a43428","Type":"ContainerDied","Data":"11c1fe4488dc96a54b799ee9cf10f2f10bea90527daf8b9b86489230379b8c87"} Dec 05 15:02:29 crc kubenswrapper[4840]: I1205 15:02:29.039330 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hdjqt" 
event={"ID":"a8f9a961-ee6b-429f-b07e-12ee35a7c986","Type":"ContainerStarted","Data":"c5733eec898c952e9a636e8db8194d27df554512c81fcc4a6b0426dcc2cba127"} Dec 05 15:02:30 crc kubenswrapper[4840]: I1205 15:02:30.046007 4840 generic.go:334] "Generic (PLEG): container finished" podID="2b685425-9cba-4168-a2a6-a4a707989b01" containerID="031f04740dcae33e9a8ba79c0b30fd8d079566591b5eb4837c7d497ae2b073c1" exitCode=0 Dec 05 15:02:30 crc kubenswrapper[4840]: I1205 15:02:30.046047 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgz4z" event={"ID":"2b685425-9cba-4168-a2a6-a4a707989b01","Type":"ContainerDied","Data":"031f04740dcae33e9a8ba79c0b30fd8d079566591b5eb4837c7d497ae2b073c1"} Dec 05 15:02:30 crc kubenswrapper[4840]: I1205 15:02:30.122235 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-svf7z"] Dec 05 15:02:30 crc kubenswrapper[4840]: I1205 15:02:30.775777 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-rbbsb" Dec 05 15:02:31 crc kubenswrapper[4840]: I1205 15:02:31.056193 4840 generic.go:334] "Generic (PLEG): container finished" podID="a7d16875-422c-4a41-8fd2-498bb020ab9a" containerID="1821b226c0af7030ca6460e01717830ed14b1871e7a49df0819841306361734d" exitCode=0 Dec 05 15:02:31 crc kubenswrapper[4840]: I1205 15:02:31.056236 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sm6c5" event={"ID":"a7d16875-422c-4a41-8fd2-498bb020ab9a","Type":"ContainerDied","Data":"1821b226c0af7030ca6460e01717830ed14b1871e7a49df0819841306361734d"} Dec 05 15:02:32 crc kubenswrapper[4840]: I1205 15:02:32.064494 4840 generic.go:334] "Generic (PLEG): container finished" podID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" containerID="c5733eec898c952e9a636e8db8194d27df554512c81fcc4a6b0426dcc2cba127" exitCode=0 Dec 05 15:02:32 crc kubenswrapper[4840]: I1205 15:02:32.064604 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hdjqt" event={"ID":"a8f9a961-ee6b-429f-b07e-12ee35a7c986","Type":"ContainerDied","Data":"c5733eec898c952e9a636e8db8194d27df554512c81fcc4a6b0426dcc2cba127"} Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.485606 4840 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.486482 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4" gracePeriod=15 Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.486498 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367" gracePeriod=15 Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.486612 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538" gracePeriod=15 Dec 05 15:02:47 
crc kubenswrapper[4840]: I1205 15:02:47.486665 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2" gracePeriod=15 Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.486712 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301" gracePeriod=15 Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.487220 4840 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 15:02:47 crc kubenswrapper[4840]: E1205 15:02:47.487458 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.487473 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 15:02:47 crc kubenswrapper[4840]: E1205 15:02:47.487485 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.487493 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 15:02:47 crc kubenswrapper[4840]: E1205 15:02:47.487509 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.487520 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 15:02:47 crc kubenswrapper[4840]: E1205 15:02:47.487531 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.487539 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 15:02:47 crc kubenswrapper[4840]: E1205 15:02:47.487549 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.487557 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 05 15:02:47 crc kubenswrapper[4840]: E1205 15:02:47.487569 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc946ddf-2ad5-4d6c-b1bc-44ad7734f358" containerName="pruner" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.487577 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc946ddf-2ad5-4d6c-b1bc-44ad7734f358" containerName="pruner" Dec 05 15:02:47 crc kubenswrapper[4840]: E1205 15:02:47.487590 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 
15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.487597 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 15:02:47 crc kubenswrapper[4840]: E1205 15:02:47.487609 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.487658 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.487781 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.487791 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.487804 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc946ddf-2ad5-4d6c-b1bc-44ad7734f358" containerName="pruner" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.487815 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.487825 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.487834 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.488122 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.490140 4840 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.490850 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.497552 4840 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.530671 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.563047 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.563123 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.563171 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.563187 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.563206 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.563240 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.563335 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.563426 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.685917 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.685970 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.685988 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.686012 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.686081 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.686101 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.686119 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.686136 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.686204 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.686235 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.686256 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.686277 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.686298 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.686318 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.686335 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.686353 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 15:02:47 crc kubenswrapper[4840]: I1205 15:02:47.827930 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 15:02:49 crc kubenswrapper[4840]: I1205 15:02:49.160958 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 15:02:49 crc kubenswrapper[4840]: I1205 15:02:49.163118 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 15:02:49 crc kubenswrapper[4840]: I1205 15:02:49.163847 4840 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367" exitCode=0 Dec 05 15:02:49 crc kubenswrapper[4840]: I1205 15:02:49.164017 4840 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538" exitCode=0 Dec 05 15:02:49 crc kubenswrapper[4840]: I1205 15:02:49.164093 4840 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2" exitCode=0 Dec 05 15:02:49 crc kubenswrapper[4840]: I1205 15:02:49.164195 4840 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301" exitCode=2 Dec 05 15:02:49 crc kubenswrapper[4840]: I1205 15:02:49.163930 4840 scope.go:117] "RemoveContainer" containerID="53e29187d0c048b7f24346e047d269ac2a79c910b362845dece3ebc403e9c4f8" Dec 05 15:02:49 crc kubenswrapper[4840]: I1205 15:02:49.991444 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 15:02:49 crc kubenswrapper[4840]: I1205 15:02:49.993315 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 15:02:49 crc kubenswrapper[4840]: I1205 15:02:49.994488 4840 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:49 crc kubenswrapper[4840]: I1205 15:02:49.994932 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.037011 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.037109 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.037137 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.037141 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.037203 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.037295 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.037495 4840 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.037521 4840 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.037533 4840 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.073648 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.198099 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2z4bf" event={"ID":"a6138547-0c56-4951-bab0-283fe8649655","Type":"ContainerStarted","Data":"36361fd60bda7a7192f3b3c5c71e9c97dbdcd457ceb7915d08f5c05194333f6d"} Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.199537 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.199927 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.203816 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgz4z" event={"ID":"2b685425-9cba-4168-a2a6-a4a707989b01","Type":"ContainerStarted","Data":"823ac419b97f7a5053448c40d0322153c59d321d6315151f6d823df9eeaab6d8"} Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.204929 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.205344 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.206225 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" 
err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.207619 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xc72x" event={"ID":"d59d7b71-22f9-49c1-9415-f420122f72df","Type":"ContainerStarted","Data":"57d09f65354c43dc470b6e2a65b668adba4974d4af62cbf5f55b8d2887de43e9"} Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.208255 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.208733 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.209119 4840 status_manager.go:851] "Failed to get status for pod" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" pod="openshift-marketplace/community-operators-xc72x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-xc72x\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.209470 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.210533 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"c5650ce023caa08166097767375f9e59662a9c47edc9ac7ff6a5f7bc9136b7a9"} Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.210573 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"0d85a1c57381013bcf864adf557c3d0b98129f2f139edd0cf4eae43c5c9f17f5"} Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.211380 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.211535 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: 
connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.211692 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.211838 4840 status_manager.go:851] "Failed to get status for pod" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" pod="openshift-marketplace/community-operators-xc72x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-xc72x\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.217447 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hdjqt" event={"ID":"a8f9a961-ee6b-429f-b07e-12ee35a7c986","Type":"ContainerStarted","Data":"e302deb3079d4f440df0b6b5b13dd17c2582341fb875cfd53560982ebb98fe36"} Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.218344 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.218510 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.218821 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.219270 4840 status_manager.go:851] "Failed to get status for pod" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" pod="openshift-marketplace/community-operators-xc72x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-xc72x\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.219466 4840 status_manager.go:851] "Failed to get status for pod" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" pod="openshift-marketplace/redhat-operators-hdjqt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdjqt\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.221195 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scqls" event={"ID":"fc195b16-d39f-4786-a866-aab4d3377d52","Type":"ContainerStarted","Data":"4f0999ddd0d56682d452b276e9a0de9689245d98b9c4745fac4efdd38ccf3729"} Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 
15:02:50.221923 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.222127 4840 status_manager.go:851] "Failed to get status for pod" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" pod="openshift-marketplace/community-operators-xc72x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-xc72x\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.226018 4840 status_manager.go:851] "Failed to get status for pod" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" pod="openshift-marketplace/redhat-operators-hdjqt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdjqt\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.226740 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.230062 4840 status_manager.go:851] "Failed to get status for pod" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" pod="openshift-marketplace/redhat-marketplace-scqls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-scqls\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.230268 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.231109 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sm6c5" event={"ID":"a7d16875-422c-4a41-8fd2-498bb020ab9a","Type":"ContainerStarted","Data":"2330eeea92785e54774a1e51df6a70a7acb18a4600c89b8fb547537fc777eba4"} Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.231618 4840 status_manager.go:851] "Failed to get status for pod" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" pod="openshift-marketplace/redhat-operators-sm6c5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-sm6c5\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.231848 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.232019 4840 status_manager.go:851] 
"Failed to get status for pod" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" pod="openshift-marketplace/redhat-marketplace-scqls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-scqls\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.232202 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.232364 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.232533 4840 status_manager.go:851] "Failed to get status for pod" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" pod="openshift-marketplace/community-operators-xc72x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-xc72x\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.232692 4840 status_manager.go:851] "Failed to get status for pod" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" pod="openshift-marketplace/redhat-operators-hdjqt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdjqt\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.233556 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.234122 4840 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4" exitCode=0 Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.234185 4840 scope.go:117] "RemoveContainer" containerID="b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.234308 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.235897 4840 status_manager.go:851] "Failed to get status for pod" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" pod="openshift-marketplace/redhat-marketplace-scqls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-scqls\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.236071 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.236208 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.236353 4840 status_manager.go:851] "Failed to get status for pod" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" pod="openshift-marketplace/community-operators-xc72x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-xc72x\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.236498 4840 status_manager.go:851] "Failed to get status for pod" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" pod="openshift-marketplace/redhat-operators-hdjqt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdjqt\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.237671 4840 status_manager.go:851] "Failed to get status for pod" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" pod="openshift-marketplace/redhat-operators-sm6c5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-sm6c5\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.237945 4840 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.238104 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.238624 4840 status_manager.go:851] "Failed to get status for pod" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" pod="openshift-marketplace/redhat-operators-sm6c5" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-sm6c5\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.238774 4840 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.238923 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.239079 4840 status_manager.go:851] "Failed to get status for pod" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" pod="openshift-marketplace/redhat-marketplace-scqls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-scqls\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.239279 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.239461 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.239552 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gwvqf" event={"ID":"c6e0195b-10e8-465d-9e3d-548633d29ed7","Type":"ContainerStarted","Data":"8fa50c9170cacedef1105761d60bbb70a7702a0e13feaf14e69840a1d32e8264"} Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.239616 4840 status_manager.go:851] "Failed to get status for pod" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" pod="openshift-marketplace/community-operators-xc72x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-xc72x\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.239749 4840 status_manager.go:851] "Failed to get status for pod" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" pod="openshift-marketplace/redhat-operators-hdjqt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdjqt\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.239942 4840 status_manager.go:851] "Failed to get status for pod" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" pod="openshift-marketplace/redhat-operators-hdjqt" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdjqt\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.240111 4840 status_manager.go:851] "Failed to get status for pod" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" pod="openshift-marketplace/redhat-marketplace-gwvqf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gwvqf\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.240248 4840 status_manager.go:851] "Failed to get status for pod" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" pod="openshift-marketplace/redhat-operators-sm6c5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-sm6c5\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.240384 4840 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.240522 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.240671 4840 status_manager.go:851] "Failed to get status for pod" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" pod="openshift-marketplace/redhat-marketplace-scqls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-scqls\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.240806 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.240968 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.241111 4840 status_manager.go:851] "Failed to get status for pod" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" pod="openshift-marketplace/community-operators-xc72x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-xc72x\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.241628 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q9jjl" 
event={"ID":"516530e0-a660-4755-8d26-b7c798a43428","Type":"ContainerStarted","Data":"fa4f5f2ec8eb9d070f56c8245a016e5c7466e7de0387c00c05691554ba7208fc"} Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.242137 4840 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.242346 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.242557 4840 status_manager.go:851] "Failed to get status for pod" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" pod="openshift-marketplace/redhat-marketplace-scqls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-scqls\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.242696 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.242832 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.242987 4840 status_manager.go:851] "Failed to get status for pod" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" pod="openshift-marketplace/community-operators-xc72x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-xc72x\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.243126 4840 status_manager.go:851] "Failed to get status for pod" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" pod="openshift-marketplace/redhat-operators-hdjqt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdjqt\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.243264 4840 status_manager.go:851] "Failed to get status for pod" podUID="516530e0-a660-4755-8d26-b7c798a43428" pod="openshift-marketplace/certified-operators-q9jjl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-q9jjl\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.243396 4840 status_manager.go:851] "Failed to get status for pod" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" 
pod="openshift-marketplace/redhat-marketplace-gwvqf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gwvqf\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.243684 4840 status_manager.go:851] "Failed to get status for pod" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" pod="openshift-marketplace/redhat-operators-sm6c5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-sm6c5\": dial tcp 38.102.83.195:6443: connect: connection refused" Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.261884 4840 scope.go:117] "RemoveContainer" containerID="00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538" Dec 05 15:02:50 crc kubenswrapper[4840]: E1205 15:02:50.293737 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:02:50Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:02:50Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:02:50Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T15:02:50Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:15adb3b2133604b064893f8009a74145e4c8bb5b134d111346dcccbdd2aa9bc2\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:164fc35a19aa6cc886c8015c8ee3eba4895e76b1152cb9d795e4f3154a8533a3\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1610512706},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:610b8d322265b2c9d6b07efb2be26bf4d91e428b46412d73f5bdae0218004794\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:eafb9c83c480396c34e85d1f5f5c2623be6305031245be36455850c0398bfcc7\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1209064267},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:0029526507396e493c5dce1652c41ed9c239b29e84ee579a2735fdb1aa3bce83\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:e1d263cd2113e0727021ccf27c8a671f8cfeaefbf93d60e3a918d6f60c136c30\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1201604946},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\
":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:3b6f8e242c6bda0d1e0a52ac08821100111fc9448c9c278b0b17a7a0bb089934\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:6008b6485650ad4746f9631eb61a02509bff383edb36e8ddf45b4b44e6785ef3\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1129901376},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:50 crc kubenswrapper[4840]: E1205 15:02:50.294108 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:50 crc kubenswrapper[4840]: E1205 15:02:50.294410 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:50 crc kubenswrapper[4840]: E1205 15:02:50.294731 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:50 crc kubenswrapper[4840]: E1205 15:02:50.294948 4840 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:50 crc kubenswrapper[4840]: E1205 15:02:50.294968 4840 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.297232 4840 scope.go:117] "RemoveContainer" containerID="de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2"
Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.323995 4840 scope.go:117] "RemoveContainer" containerID="e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301"
Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.344047 4840 scope.go:117] "RemoveContainer" containerID="47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4"
Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.365571 4840 scope.go:117] "RemoveContainer" containerID="b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7"
Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.392519 4840 scope.go:117] "RemoveContainer" containerID="b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367"
Dec 05 15:02:50 crc kubenswrapper[4840]: E1205 15:02:50.393123 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\": container with ID starting with b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367 not found: ID does not exist" containerID="b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367"
Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.393164 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367"} err="failed to get container status \"b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\": rpc error: code = NotFound desc = could not find container \"b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367\": container with ID starting with b0d03925db2d68da93d1d5d00fd37d40472f72cb39f77763e30aba820e6eb367 not found: ID does not exist"
Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.393193 4840 scope.go:117] "RemoveContainer" containerID="00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538"
Dec 05 15:02:50 crc kubenswrapper[4840]: E1205 15:02:50.398569 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\": container with ID starting with 00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538 not found: ID does not exist" containerID="00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538"
Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.398625 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538"} err="failed to get container status \"00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\": rpc error: code = NotFound desc = could not find container \"00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538\": container with ID starting with 00bb80b3204ba94e2b1050cf7b291955ce6024db716d30884c026d3fabf20538 not found: ID does not exist"
Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.398661 4840 scope.go:117] "RemoveContainer" containerID="de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2"
Dec 05 15:02:50 crc kubenswrapper[4840]: E1205 15:02:50.399208 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\": container with ID starting with de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2 not found: ID does not exist" containerID="de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2"
Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.399255 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2"} err="failed to get container status \"de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\": rpc error: code = NotFound desc = could not find container \"de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2\": container with ID starting with de7c6a7d00017d9b13b11b910e13324d9e720ded1e9762734cfda88bf651cfe2 not found: ID does not exist"
Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.399288 4840 scope.go:117] "RemoveContainer" containerID="e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301"
Dec 05 15:02:50 crc kubenswrapper[4840]: E1205 15:02:50.399578 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\": container with ID starting with e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301 not found: ID does not exist" containerID="e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301"
Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.399616 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301"} err="failed to get container status \"e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\": rpc error: code = NotFound desc = could not find container \"e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301\": container with ID starting with e8e493595b1913a3b704c9e09970d61907040e1b6fe985affd852b3e4841f301 not found: ID does not exist"
Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.399642 4840 scope.go:117] "RemoveContainer" containerID="47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4"
Dec 05 15:02:50 crc kubenswrapper[4840]: E1205 15:02:50.399987 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\": container with ID starting with 47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4 not found: ID does not exist" containerID="47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4"
Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.400020 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4"} err="failed to get container status \"47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\": rpc error: code = NotFound desc = could not find container \"47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4\": container with ID starting with 47342b47ea418c2c0b763913cead64e8a754365664ab2671636b1e3039e847a4 not found: ID does not exist"
Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.400041 4840 scope.go:117] "RemoveContainer" containerID="b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7"
Dec 05 15:02:50 crc kubenswrapper[4840]: E1205 15:02:50.400290 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\": container with ID starting with b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7 not found: ID does not exist" containerID="b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7"
Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.400316 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7"} err="failed to get container status \"b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\": rpc error: code = NotFound desc = could not find container \"b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7\": container with ID starting with b473f778bb09df1185b3b808c9bd3947db84a4b2dd2d5d7a38bb2ebb933d18f7 not found: ID does not exist"
Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.485008 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-scqls"
Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.485337 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-scqls"
Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.987179 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gwvqf"
Dec 05 15:02:50 crc kubenswrapper[4840]: I1205 15:02:50.987241 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gwvqf"
Dec 05 15:02:51 crc kubenswrapper[4840]: I1205 15:02:51.344978 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hdjqt"
Dec 05 15:02:51 crc kubenswrapper[4840]: I1205 15:02:51.345020 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hdjqt"
Dec 05 15:02:51 crc kubenswrapper[4840]: I1205 15:02:51.499648 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-sm6c5"
Dec 05 15:02:51 crc kubenswrapper[4840]: I1205 15:02:51.499698 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-sm6c5"
Dec 05 15:02:51 crc kubenswrapper[4840]: I1205 15:02:51.728292 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-scqls" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" containerName="registry-server" probeResult="failure" output=<
Dec 05 15:02:51 crc kubenswrapper[4840]: timeout: failed to connect service ":50051" within 1s
Dec 05 15:02:51 crc kubenswrapper[4840]: >
Dec 05 15:02:52 crc kubenswrapper[4840]: I1205 15:02:52.032982 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-gwvqf" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" containerName="registry-server" probeResult="failure" output=<
Dec 05 15:02:52 crc kubenswrapper[4840]: timeout: failed to connect service ":50051" within 1s
Dec 05 15:02:52 crc kubenswrapper[4840]: >
Dec 05 15:02:52 crc kubenswrapper[4840]: I1205 15:02:52.070701 4840 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:52 crc kubenswrapper[4840]: I1205 15:02:52.071088 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:52 crc kubenswrapper[4840]: I1205 15:02:52.071391 4840 status_manager.go:851] "Failed to get status for pod" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" pod="openshift-marketplace/redhat-marketplace-scqls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-scqls\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:52 crc kubenswrapper[4840]: I1205 15:02:52.071731 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:52 crc kubenswrapper[4840]: I1205 15:02:52.072047 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:52 crc kubenswrapper[4840]: I1205 15:02:52.072358 4840 status_manager.go:851] "Failed to get status for pod" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" pod="openshift-marketplace/community-operators-xc72x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-xc72x\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:52 crc kubenswrapper[4840]: I1205 15:02:52.072776 4840 status_manager.go:851] "Failed to get status for pod" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" pod="openshift-marketplace/redhat-operators-hdjqt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdjqt\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:52 crc kubenswrapper[4840]: I1205 15:02:52.074111 4840 status_manager.go:851] "Failed to get status for pod" podUID="516530e0-a660-4755-8d26-b7c798a43428" pod="openshift-marketplace/certified-operators-q9jjl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-q9jjl\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:52 crc kubenswrapper[4840]: I1205 15:02:52.074474 4840 status_manager.go:851] "Failed to get status for pod" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" pod="openshift-marketplace/redhat-marketplace-gwvqf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gwvqf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:52 crc kubenswrapper[4840]: I1205 15:02:52.074775 4840 status_manager.go:851] "Failed to get status for pod" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" pod="openshift-marketplace/redhat-operators-sm6c5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-sm6c5\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:52 crc kubenswrapper[4840]: I1205 15:02:52.387711 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hdjqt" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" containerName="registry-server" probeResult="failure" output=<
Dec 05 15:02:52 crc kubenswrapper[4840]: timeout: failed to connect service ":50051" within 1s
Dec 05 15:02:52 crc kubenswrapper[4840]: >
Dec 05 15:02:52 crc kubenswrapper[4840]: I1205 15:02:52.554943 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-sm6c5" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" containerName="registry-server" probeResult="failure" output=<
Dec 05 15:02:52 crc kubenswrapper[4840]: timeout: failed to connect service ":50051" within 1s
Dec 05 15:02:52 crc kubenswrapper[4840]: >
Dec 05 15:02:52 crc kubenswrapper[4840]: E1205 15:02:52.691001 4840 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.195:6443: connect: connection refused" event="&Event{ObjectMeta:{community-operators-2z4bf.187e59ea804beb61 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:community-operators-2z4bf,UID:a6138547-0c56-4951-bab0-283fe8649655,APIVersion:v1,ResourceVersion:27909,FieldPath:spec.containers{registry-server},},Reason:Pulled,Message:Successfully pulled image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\" in 19.682s (19.682s including waiting). Image size: 907837715 bytes.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 15:02:47.685491553 +0000 UTC m=+246.026554167,LastTimestamp:2025-12-05 15:02:47.685491553 +0000 UTC m=+246.026554167,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 05 15:02:52 crc kubenswrapper[4840]: E1205 15:02:52.877922 4840 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:52 crc kubenswrapper[4840]: E1205 15:02:52.878387 4840 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:52 crc kubenswrapper[4840]: E1205 15:02:52.878768 4840 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:52 crc kubenswrapper[4840]: E1205 15:02:52.879351 4840 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:52 crc kubenswrapper[4840]: E1205 15:02:52.879715 4840 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:52 crc kubenswrapper[4840]: I1205 15:02:52.879767 4840 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease"
Dec 05 15:02:52 crc kubenswrapper[4840]: E1205 15:02:52.880099 4840 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.195:6443: connect: connection refused" interval="200ms"
Dec 05 15:02:53 crc kubenswrapper[4840]: E1205 15:02:53.081197 4840 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.195:6443: connect: connection refused" interval="400ms"
Dec 05 15:02:53 crc kubenswrapper[4840]: E1205 15:02:53.482257 4840 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.195:6443: connect: connection refused" interval="800ms"
Dec 05 15:02:54 crc kubenswrapper[4840]: I1205 15:02:54.274597 4840 generic.go:334] "Generic (PLEG): container finished" podID="fb5b7dc0-6b27-4994-9eb7-db65746b4f97" containerID="6d60942719955176c7462572ef39e6c6499878c3a6f6530dad17764d373fadc7" exitCode=0
Dec 05 15:02:54 crc kubenswrapper[4840]: I1205 15:02:54.274676 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"fb5b7dc0-6b27-4994-9eb7-db65746b4f97","Type":"ContainerDied","Data":"6d60942719955176c7462572ef39e6c6499878c3a6f6530dad17764d373fadc7"}
Dec 05 15:02:54 crc kubenswrapper[4840]: I1205 15:02:54.275780 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:54 crc kubenswrapper[4840]: I1205 15:02:54.276236 4840 status_manager.go:851] "Failed to get status for pod" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" pod="openshift-marketplace/redhat-marketplace-scqls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-scqls\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:54 crc kubenswrapper[4840]: I1205 15:02:54.276592 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:54 crc kubenswrapper[4840]: I1205 15:02:54.276899 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:54 crc kubenswrapper[4840]: I1205 15:02:54.277215 4840 status_manager.go:851] "Failed to get status for pod" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" pod="openshift-marketplace/community-operators-xc72x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-xc72x\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:54 crc kubenswrapper[4840]: I1205 15:02:54.277485 4840 status_manager.go:851] "Failed to get status for pod" podUID="fb5b7dc0-6b27-4994-9eb7-db65746b4f97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:54 crc kubenswrapper[4840]: I1205 15:02:54.277784 4840 status_manager.go:851] "Failed to get status for pod" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" pod="openshift-marketplace/redhat-operators-hdjqt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdjqt\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:54 crc kubenswrapper[4840]: I1205 15:02:54.278034 4840 status_manager.go:851] "Failed to get status for pod" podUID="516530e0-a660-4755-8d26-b7c798a43428" pod="openshift-marketplace/certified-operators-q9jjl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-q9jjl\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:54 crc kubenswrapper[4840]: I1205 15:02:54.278284 4840 status_manager.go:851] "Failed to get status for pod" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" pod="openshift-marketplace/redhat-marketplace-gwvqf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gwvqf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:54 crc kubenswrapper[4840]: I1205 15:02:54.278633 4840 status_manager.go:851] "Failed to get status for pod" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" pod="openshift-marketplace/redhat-operators-sm6c5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-sm6c5\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:54 crc kubenswrapper[4840]: E1205 15:02:54.282753 4840 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.195:6443: connect: connection refused" interval="1.6s"
Dec 05 15:02:54 crc kubenswrapper[4840]: E1205 15:02:54.925818 4840 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.195:6443: connect: connection refused" event="&Event{ObjectMeta:{community-operators-2z4bf.187e59ea804beb61 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:community-operators-2z4bf,UID:a6138547-0c56-4951-bab0-283fe8649655,APIVersion:v1,ResourceVersion:27909,FieldPath:spec.containers{registry-server},},Reason:Pulled,Message:Successfully pulled image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\" in 19.682s (19.682s including waiting). Image size: 907837715 bytes.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 15:02:47.685491553 +0000 UTC m=+246.026554167,LastTimestamp:2025-12-05 15:02:47.685491553 +0000 UTC m=+246.026554167,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 05 15:02:55 crc kubenswrapper[4840]: E1205 15:02:55.075673 4840 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.195:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" volumeName="registry-storage"
Dec 05 15:02:55 crc kubenswrapper[4840]: I1205 15:02:55.158948 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" podUID="811f6598-f603-4a15-8dec-add067d82d5c" containerName="oauth-openshift" containerID="cri-o://c3316cb2c121789e4cbf93384a78074f73f1c4e84d8e13cc2ec0dbb5ba22f90e" gracePeriod=15
Dec 05 15:02:55 crc kubenswrapper[4840]: I1205 15:02:55.519466 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 15:02:55 crc kubenswrapper[4840]: I1205 15:02:55.520501 4840 status_manager.go:851] "Failed to get status for pod" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" pod="openshift-marketplace/redhat-marketplace-scqls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-scqls\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:55 crc kubenswrapper[4840]: I1205 15:02:55.520786 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:55 crc kubenswrapper[4840]: I1205 15:02:55.521079 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:55 crc kubenswrapper[4840]: I1205 15:02:55.521411 4840 status_manager.go:851] "Failed to get status for pod" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" pod="openshift-marketplace/community-operators-xc72x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-xc72x\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:55 crc kubenswrapper[4840]: I1205 15:02:55.521781 4840 status_manager.go:851] "Failed to get status for pod" podUID="fb5b7dc0-6b27-4994-9eb7-db65746b4f97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:55 crc kubenswrapper[4840]: I1205 15:02:55.522136 4840 status_manager.go:851] "Failed to get status for pod" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" pod="openshift-marketplace/redhat-operators-hdjqt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdjqt\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:55 crc kubenswrapper[4840]: I1205 15:02:55.522286 4840 status_manager.go:851] "Failed to get status for pod" podUID="516530e0-a660-4755-8d26-b7c798a43428" pod="openshift-marketplace/certified-operators-q9jjl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-q9jjl\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:55 crc kubenswrapper[4840]: I1205 15:02:55.522427 4840 status_manager.go:851] "Failed to get status for pod" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" pod="openshift-marketplace/redhat-marketplace-gwvqf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gwvqf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:55 crc kubenswrapper[4840]: I1205 15:02:55.522565 4840 status_manager.go:851] "Failed to get status for pod" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" pod="openshift-marketplace/redhat-operators-sm6c5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-sm6c5\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:55 crc kubenswrapper[4840]: I1205 15:02:55.522706 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:55 crc kubenswrapper[4840]: I1205 15:02:55.603615 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fb5b7dc0-6b27-4994-9eb7-db65746b4f97-kube-api-access\") pod \"fb5b7dc0-6b27-4994-9eb7-db65746b4f97\" (UID: \"fb5b7dc0-6b27-4994-9eb7-db65746b4f97\") "
Dec 05 15:02:55 crc kubenswrapper[4840]: I1205 15:02:55.603687 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/fb5b7dc0-6b27-4994-9eb7-db65746b4f97-var-lock\") pod \"fb5b7dc0-6b27-4994-9eb7-db65746b4f97\" (UID: \"fb5b7dc0-6b27-4994-9eb7-db65746b4f97\") "
Dec 05 15:02:55 crc kubenswrapper[4840]: I1205 15:02:55.603726 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fb5b7dc0-6b27-4994-9eb7-db65746b4f97-kubelet-dir\") pod \"fb5b7dc0-6b27-4994-9eb7-db65746b4f97\" (UID: \"fb5b7dc0-6b27-4994-9eb7-db65746b4f97\") "
Dec 05 15:02:55 crc kubenswrapper[4840]: I1205 15:02:55.604227 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb5b7dc0-6b27-4994-9eb7-db65746b4f97-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "fb5b7dc0-6b27-4994-9eb7-db65746b4f97" (UID: "fb5b7dc0-6b27-4994-9eb7-db65746b4f97"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 15:02:55 crc kubenswrapper[4840]: I1205 15:02:55.605009 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb5b7dc0-6b27-4994-9eb7-db65746b4f97-var-lock" (OuterVolumeSpecName: "var-lock") pod "fb5b7dc0-6b27-4994-9eb7-db65746b4f97" (UID: "fb5b7dc0-6b27-4994-9eb7-db65746b4f97"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 15:02:55 crc kubenswrapper[4840]: I1205 15:02:55.610047 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb5b7dc0-6b27-4994-9eb7-db65746b4f97-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "fb5b7dc0-6b27-4994-9eb7-db65746b4f97" (UID: "fb5b7dc0-6b27-4994-9eb7-db65746b4f97"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 15:02:55 crc kubenswrapper[4840]: I1205 15:02:55.705812 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fb5b7dc0-6b27-4994-9eb7-db65746b4f97-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 05 15:02:55 crc kubenswrapper[4840]: I1205 15:02:55.705857 4840 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/fb5b7dc0-6b27-4994-9eb7-db65746b4f97-var-lock\") on node \"crc\" DevicePath \"\""
Dec 05 15:02:55 crc kubenswrapper[4840]: I1205 15:02:55.705963 4840 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fb5b7dc0-6b27-4994-9eb7-db65746b4f97-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 05 15:02:55 crc kubenswrapper[4840]: E1205 15:02:55.883966 4840 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.195:6443: connect: connection refused" interval="3.2s"
Dec 05 15:02:56 crc kubenswrapper[4840]: I1205 15:02:56.289244 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"fb5b7dc0-6b27-4994-9eb7-db65746b4f97","Type":"ContainerDied","Data":"475426501a4dba2c133ad1fe31a6275052ad90b457673ad697e300e68cc8e2f1"}
Dec 05 15:02:56 crc kubenswrapper[4840]: I1205 15:02:56.289302 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="475426501a4dba2c133ad1fe31a6275052ad90b457673ad697e300e68cc8e2f1"
Dec 05 15:02:56 crc kubenswrapper[4840]: I1205 15:02:56.289362 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 15:02:56 crc kubenswrapper[4840]: I1205 15:02:56.293290 4840 status_manager.go:851] "Failed to get status for pod" podUID="fb5b7dc0-6b27-4994-9eb7-db65746b4f97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:56 crc kubenswrapper[4840]: I1205 15:02:56.293621 4840 status_manager.go:851] "Failed to get status for pod" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" pod="openshift-marketplace/redhat-operators-hdjqt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdjqt\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:56 crc kubenswrapper[4840]: I1205 15:02:56.293809 4840 status_manager.go:851] "Failed to get status for pod" podUID="516530e0-a660-4755-8d26-b7c798a43428" pod="openshift-marketplace/certified-operators-q9jjl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-q9jjl\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:56 crc kubenswrapper[4840]: I1205 15:02:56.294027 4840 status_manager.go:851] "Failed to get status for pod" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" pod="openshift-marketplace/redhat-marketplace-gwvqf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gwvqf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:56 crc kubenswrapper[4840]: I1205 15:02:56.294302 4840 status_manager.go:851] "Failed to get status for pod" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" pod="openshift-marketplace/redhat-operators-sm6c5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-sm6c5\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:56 crc kubenswrapper[4840]: I1205 15:02:56.294576 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:56 crc kubenswrapper[4840]: I1205 15:02:56.294926 4840 status_manager.go:851] "Failed to get status for pod" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" pod="openshift-marketplace/redhat-marketplace-scqls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-scqls\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:56 crc kubenswrapper[4840]: I1205 15:02:56.295168 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:56 crc kubenswrapper[4840]: I1205 15:02:56.295421 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:56 crc kubenswrapper[4840]: I1205 15:02:56.295753 4840 status_manager.go:851] "Failed to get status for pod" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" pod="openshift-marketplace/community-operators-xc72x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-xc72x\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.066906 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.067909 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.068485 4840 status_manager.go:851] "Failed to get status for pod" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" pod="openshift-marketplace/redhat-marketplace-scqls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-scqls\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.068971 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.069247 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.069500 4840 status_manager.go:851] "Failed to get status for pod" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" pod="openshift-marketplace/community-operators-xc72x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-xc72x\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.069759 4840 status_manager.go:851] "Failed to get status for pod" podUID="fb5b7dc0-6b27-4994-9eb7-db65746b4f97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.070053 4840 status_manager.go:851] "Failed to get status for pod" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" pod="openshift-marketplace/redhat-operators-hdjqt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdjqt\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.070260 4840 status_manager.go:851] "Failed to get status for pod" podUID="516530e0-a660-4755-8d26-b7c798a43428" pod="openshift-marketplace/certified-operators-q9jjl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-q9jjl\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.070500 4840 status_manager.go:851] "Failed to get status for pod" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" pod="openshift-marketplace/redhat-marketplace-gwvqf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gwvqf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.070758 4840 status_manager.go:851] "Failed to get status for pod" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" pod="openshift-marketplace/redhat-operators-sm6c5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-sm6c5\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.088003 4840 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="810c38e3-e0d7-4b5c-9c12-5847a5b81a3d"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.088052 4840 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="810c38e3-e0d7-4b5c-9c12-5847a5b81a3d"
Dec 05 15:02:58 crc kubenswrapper[4840]: E1205 15:02:58.088572 4840 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.195:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.089114 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 15:02:58 crc kubenswrapper[4840]: W1205 15:02:58.112963 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-7c9c584376617fed6b8a8fcb147f04452905ef7fa7931b06544218ec0e8fe2d8 WatchSource:0}: Error finding container 7c9c584376617fed6b8a8fcb147f04452905ef7fa7931b06544218ec0e8fe2d8: Status 404 returned error can't find the container with id 7c9c584376617fed6b8a8fcb147f04452905ef7fa7931b06544218ec0e8fe2d8
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.182833 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2z4bf"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.183271 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2z4bf"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.319725 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"7c9c584376617fed6b8a8fcb147f04452905ef7fa7931b06544218ec0e8fe2d8"}
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.335634 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-q9jjl"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.335673 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-q9jjl"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.598988 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-q9jjl"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.599426 4840 status_manager.go:851] "Failed to get status for pod" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" pod="openshift-marketplace/redhat-marketplace-scqls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-scqls\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.599978 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.600932 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.601236 4840 status_manager.go:851] "Failed to get status for pod" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" pod="openshift-marketplace/community-operators-xc72x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-xc72x\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.601621 4840 status_manager.go:851] "Failed to get status for pod" podUID="fb5b7dc0-6b27-4994-9eb7-db65746b4f97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.601897 4840 status_manager.go:851] "Failed to get status for pod" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" pod="openshift-marketplace/redhat-operators-hdjqt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdjqt\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.602236 4840 status_manager.go:851] "Failed to get status for pod" podUID="516530e0-a660-4755-8d26-b7c798a43428" pod="openshift-marketplace/certified-operators-q9jjl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-q9jjl\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.602313 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2z4bf"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.602517 4840 status_manager.go:851] "Failed to get status for pod" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" pod="openshift-marketplace/redhat-marketplace-gwvqf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gwvqf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.602810 4840 status_manager.go:851] "Failed to get status for pod" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" pod="openshift-marketplace/redhat-operators-sm6c5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-sm6c5\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.603080 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.603415 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.603697 4840 status_manager.go:851] "Failed to get status for pod" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" pod="openshift-marketplace/redhat-marketplace-scqls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-scqls\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.603933 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.604122 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.604299 4840 status_manager.go:851] "Failed to get status for pod" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" pod="openshift-marketplace/community-operators-xc72x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-xc72x\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.604476 4840 status_manager.go:851] "Failed to get status for pod" podUID="fb5b7dc0-6b27-4994-9eb7-db65746b4f97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.604767 4840 status_manager.go:851] "Failed to get status for pod" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" pod="openshift-marketplace/redhat-operators-hdjqt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdjqt\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.605012 4840 status_manager.go:851] "Failed to get status for pod" podUID="516530e0-a660-4755-8d26-b7c798a43428" pod="openshift-marketplace/certified-operators-q9jjl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-q9jjl\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.605188 4840 status_manager.go:851] "Failed to get status for pod" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" pod="openshift-marketplace/redhat-marketplace-gwvqf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gwvqf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.605375 4840 status_manager.go:851] "Failed to get status for pod" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" pod="openshift-marketplace/redhat-operators-sm6c5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-sm6c5\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.644747 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2z4bf"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.645405 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.645709 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.645992 4840 status_manager.go:851] "Failed to get status for pod" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" pod="openshift-marketplace/community-operators-xc72x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-xc72x\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.646250 4840 status_manager.go:851] "Failed to get status for pod" podUID="fb5b7dc0-6b27-4994-9eb7-db65746b4f97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.646486 4840 status_manager.go:851] "Failed to get status for pod" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" pod="openshift-marketplace/redhat-operators-hdjqt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdjqt\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.646730 4840 status_manager.go:851] "Failed to get status for pod" podUID="516530e0-a660-4755-8d26-b7c798a43428" pod="openshift-marketplace/certified-operators-q9jjl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-q9jjl\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.646993 4840 status_manager.go:851] "Failed to get status for pod" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" pod="openshift-marketplace/redhat-operators-sm6c5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-sm6c5\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.647297 4840 status_manager.go:851] "Failed to get status for pod" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" pod="openshift-marketplace/redhat-marketplace-gwvqf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gwvqf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.647538 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.647775 4840 status_manager.go:851] "Failed to get status for pod" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" pod="openshift-marketplace/redhat-marketplace-scqls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-scqls\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.920674 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xc72x"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.921184 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xc72x"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.958356 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xc72x"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.958972 4840 status_manager.go:851] "Failed to get status for pod" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" pod="openshift-marketplace/redhat-marketplace-scqls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-scqls\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.959371 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.959664 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.959896 4840 status_manager.go:851] "Failed to get status for pod" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" pod="openshift-marketplace/community-operators-xc72x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-xc72x\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.960131 4840 status_manager.go:851] "Failed to get status for pod" podUID="fb5b7dc0-6b27-4994-9eb7-db65746b4f97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.960380 4840 status_manager.go:851] "Failed to get status for pod" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" pod="openshift-marketplace/redhat-operators-hdjqt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdjqt\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.960648 4840 status_manager.go:851] "Failed to get status for pod" podUID="516530e0-a660-4755-8d26-b7c798a43428" pod="openshift-marketplace/certified-operators-q9jjl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-q9jjl\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.960904 4840 status_manager.go:851] "Failed to get status for pod" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" pod="openshift-marketplace/redhat-marketplace-gwvqf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gwvqf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.961115 4840 status_manager.go:851] "Failed to get status for pod" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" pod="openshift-marketplace/redhat-operators-sm6c5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-sm6c5\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:58 crc kubenswrapper[4840]: I1205 15:02:58.961312 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.028030 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vgz4z"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.028083 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vgz4z"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.081714 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vgz4z"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.082284 4840 status_manager.go:851] "Failed to get status for pod" podUID="516530e0-a660-4755-8d26-b7c798a43428" pod="openshift-marketplace/certified-operators-q9jjl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-q9jjl\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.082773 4840 status_manager.go:851] "Failed to get status for pod" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" pod="openshift-marketplace/redhat-marketplace-gwvqf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gwvqf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.083207 4840 status_manager.go:851] "Failed to get status for pod" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" pod="openshift-marketplace/redhat-operators-sm6c5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-sm6c5\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.083542 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.083989 4840 status_manager.go:851] "Failed to get status for pod" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" pod="openshift-marketplace/redhat-marketplace-scqls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-scqls\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: E1205 15:02:59.084373 4840 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.195:6443: connect: connection refused" interval="6.4s"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.084468 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.084906 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.085110 4840 status_manager.go:851] "Failed to get status for pod" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" pod="openshift-marketplace/community-operators-xc72x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-xc72x\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.085284 4840 status_manager.go:851] "Failed to get status for pod" podUID="fb5b7dc0-6b27-4994-9eb7-db65746b4f97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.085457 4840 status_manager.go:851] "Failed to get status for pod" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" pod="openshift-marketplace/redhat-operators-hdjqt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdjqt\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.329342 4840 generic.go:334] "Generic (PLEG): container finished" podID="811f6598-f603-4a15-8dec-add067d82d5c" containerID="c3316cb2c121789e4cbf93384a78074f73f1c4e84d8e13cc2ec0dbb5ba22f90e" exitCode=0
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.329459 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" event={"ID":"811f6598-f603-4a15-8dec-add067d82d5c","Type":"ContainerDied","Data":"c3316cb2c121789e4cbf93384a78074f73f1c4e84d8e13cc2ec0dbb5ba22f90e"}
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.332654 4840 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="1435deea291cef8f97c2088c444d68917257aee6e90252f40d188f4bb8587352" exitCode=0
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.332714 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"1435deea291cef8f97c2088c444d68917257aee6e90252f40d188f4bb8587352"}
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.333291 4840 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="810c38e3-e0d7-4b5c-9c12-5847a5b81a3d"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.333314 4840 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="810c38e3-e0d7-4b5c-9c12-5847a5b81a3d"
Dec 05 15:02:59 crc kubenswrapper[4840]: E1205 15:02:59.334234 4840 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.195:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.335120 4840 status_manager.go:851] "Failed to get status for pod" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" pod="openshift-marketplace/redhat-operators-hdjqt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdjqt\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.335599 4840 status_manager.go:851] "Failed to get status for pod" podUID="516530e0-a660-4755-8d26-b7c798a43428" pod="openshift-marketplace/certified-operators-q9jjl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-q9jjl\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.335976 4840 status_manager.go:851] "Failed to get status for pod" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" pod="openshift-marketplace/redhat-marketplace-gwvqf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gwvqf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.337596 4840 status_manager.go:851] "Failed to get status for pod" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" pod="openshift-marketplace/redhat-operators-sm6c5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-sm6c5\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.338395 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.338596 4840 status_manager.go:851] "Failed to get status for pod" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" pod="openshift-marketplace/redhat-marketplace-scqls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-scqls\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.338756 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.338946 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.339123 4840 status_manager.go:851] "Failed to get status for pod" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" pod="openshift-marketplace/community-operators-xc72x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-xc72x\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.339303 4840 status_manager.go:851] "Failed to get status for pod" podUID="fb5b7dc0-6b27-4994-9eb7-db65746b4f97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.387480 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vgz4z"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.388053 4840 status_manager.go:851] "Failed to get status for pod" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" pod="openshift-marketplace/redhat-operators-hdjqt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdjqt\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.388301 4840 status_manager.go:851] "Failed to get status for pod" podUID="516530e0-a660-4755-8d26-b7c798a43428" pod="openshift-marketplace/certified-operators-q9jjl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-q9jjl\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.388639 4840 status_manager.go:851] "Failed to get status for pod" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" pod="openshift-marketplace/redhat-marketplace-gwvqf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gwvqf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.388906 4840 status_manager.go:851] "Failed to get status for pod" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" pod="openshift-marketplace/redhat-operators-sm6c5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-sm6c5\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.389185 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.389646 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xc72x"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.389759 4840 status_manager.go:851] "Failed to get status for pod" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" pod="openshift-marketplace/redhat-marketplace-scqls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-scqls\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.390120 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.390351 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.390615 4840 status_manager.go:851] "Failed to get status for pod" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" pod="openshift-marketplace/community-operators-xc72x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-xc72x\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.390906 4840 status_manager.go:851] "Failed to get status for pod" podUID="fb5b7dc0-6b27-4994-9eb7-db65746b4f97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.391283 4840 status_manager.go:851] "Failed to get status for pod" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" pod="openshift-marketplace/redhat-marketplace-gwvqf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gwvqf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.391533 4840 status_manager.go:851] "Failed to get status for pod" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" pod="openshift-marketplace/redhat-operators-sm6c5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-sm6c5\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.391831 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.392193 4840 status_manager.go:851] "Failed to get status for pod" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" pod="openshift-marketplace/redhat-marketplace-scqls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-scqls\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.392468 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.393119 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.393454 4840 status_manager.go:851] "Failed to get status for pod" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" pod="openshift-marketplace/community-operators-xc72x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-xc72x\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.393847 4840 status_manager.go:851] "Failed to get status for pod" podUID="fb5b7dc0-6b27-4994-9eb7-db65746b4f97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.394238 4840 status_manager.go:851] "Failed to get status for pod" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" pod="openshift-marketplace/redhat-operators-hdjqt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdjqt\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.394578 4840 status_manager.go:851] "Failed to get status for pod" podUID="516530e0-a660-4755-8d26-b7c798a43428" pod="openshift-marketplace/certified-operators-q9jjl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-q9jjl\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.444515 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-q9jjl"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.444985 4840 status_manager.go:851] "Failed to get status for pod" podUID="516530e0-a660-4755-8d26-b7c798a43428" pod="openshift-marketplace/certified-operators-q9jjl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-q9jjl\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.445566 4840 status_manager.go:851] "Failed to get status for pod" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" pod="openshift-marketplace/redhat-marketplace-gwvqf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gwvqf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.445938 4840 status_manager.go:851] "Failed to get status for pod" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" pod="openshift-marketplace/redhat-operators-sm6c5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-sm6c5\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.446216 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.446515 4840 status_manager.go:851] "Failed to get status for pod" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" pod="openshift-marketplace/redhat-marketplace-scqls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-scqls\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.446748 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.447036 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.447360 4840 status_manager.go:851] "Failed to get status for pod" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" pod="openshift-marketplace/community-operators-xc72x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-xc72x\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.447623 4840 status_manager.go:851] "Failed to get status for pod" podUID="fb5b7dc0-6b27-4994-9eb7-db65746b4f97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.447912 4840 status_manager.go:851] "Failed to get status for pod" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" pod="openshift-marketplace/redhat-operators-hdjqt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdjqt\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.848832 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-svf7z"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.849757 4840 status_manager.go:851] "Failed to get status for pod" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" pod="openshift-marketplace/community-operators-xc72x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-xc72x\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.849973 4840 status_manager.go:851] "Failed to get status for pod" podUID="fb5b7dc0-6b27-4994-9eb7-db65746b4f97" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.850212 4840 status_manager.go:851] "Failed to get status for pod" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" pod="openshift-marketplace/redhat-operators-hdjqt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdjqt\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.850504 4840 status_manager.go:851] "Failed to get status for pod" podUID="516530e0-a660-4755-8d26-b7c798a43428" pod="openshift-marketplace/certified-operators-q9jjl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-q9jjl\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.850778 4840 status_manager.go:851] "Failed to get status for pod" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" pod="openshift-marketplace/redhat-operators-sm6c5" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-sm6c5\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.851040 4840 status_manager.go:851] "Failed to get status for pod" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" pod="openshift-marketplace/redhat-marketplace-gwvqf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gwvqf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.851297 4840 status_manager.go:851] "Failed to get status for pod" podUID="811f6598-f603-4a15-8dec-add067d82d5c" pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-svf7z\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.851551 4840 status_manager.go:851] "Failed to get status for pod" podUID="a6138547-0c56-4951-bab0-283fe8649655" pod="openshift-marketplace/community-operators-2z4bf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-2z4bf\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.851806 4840 status_manager.go:851] "Failed to get status for pod" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" pod="openshift-marketplace/redhat-marketplace-scqls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-scqls\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.852092 4840 status_manager.go:851] "Failed to get status for pod" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" pod="openshift-marketplace/certified-operators-vgz4z" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vgz4z\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.852350 4840 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.195:6443: connect: connection refused"
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.959926 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/811f6598-f603-4a15-8dec-add067d82d5c-audit-policies\") pod \"811f6598-f603-4a15-8dec-add067d82d5c\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") "
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.959992 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-user-template-error\") pod \"811f6598-f603-4a15-8dec-add067d82d5c\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") "
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.960038 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-ocp-branding-template\") pod \"811f6598-f603-4a15-8dec-add067d82d5c\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") "
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.960142 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-user-idp-0-file-data\") pod \"811f6598-f603-4a15-8dec-add067d82d5c\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") "
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.960169 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/811f6598-f603-4a15-8dec-add067d82d5c-audit-dir\") pod \"811f6598-f603-4a15-8dec-add067d82d5c\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") "
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.960192 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-user-template-login\") pod \"811f6598-f603-4a15-8dec-add067d82d5c\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") "
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.960226 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-trusted-ca-bundle\") pod \"811f6598-f603-4a15-8dec-add067d82d5c\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") "
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.960269 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-session\") pod \"811f6598-f603-4a15-8dec-add067d82d5c\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") "
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.960299 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-router-certs\") pod \"811f6598-f603-4a15-8dec-add067d82d5c\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") "
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.960322 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-serving-cert\") pod \"811f6598-f603-4a15-8dec-add067d82d5c\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") "
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.960346 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxlfl\" (UniqueName: \"kubernetes.io/projected/811f6598-f603-4a15-8dec-add067d82d5c-kube-api-access-zxlfl\") pod \"811f6598-f603-4a15-8dec-add067d82d5c\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") "
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.960377 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-cliconfig\") pod \"811f6598-f603-4a15-8dec-add067d82d5c\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") "
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.960416 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-service-ca\") pod \"811f6598-f603-4a15-8dec-add067d82d5c\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") "
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.960449 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-user-template-provider-selection\") pod \"811f6598-f603-4a15-8dec-add067d82d5c\" (UID: \"811f6598-f603-4a15-8dec-add067d82d5c\") "
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.960825 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/811f6598-f603-4a15-8dec-add067d82d5c-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "811f6598-f603-4a15-8dec-add067d82d5c" (UID: "811f6598-f603-4a15-8dec-add067d82d5c"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.961103 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/811f6598-f603-4a15-8dec-add067d82d5c-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "811f6598-f603-4a15-8dec-add067d82d5c" (UID: "811f6598-f603-4a15-8dec-add067d82d5c"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.961606 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "811f6598-f603-4a15-8dec-add067d82d5c" (UID: "811f6598-f603-4a15-8dec-add067d82d5c"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.962178 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "811f6598-f603-4a15-8dec-add067d82d5c" (UID: "811f6598-f603-4a15-8dec-add067d82d5c"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.962402 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "811f6598-f603-4a15-8dec-add067d82d5c" (UID: "811f6598-f603-4a15-8dec-add067d82d5c"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.967295 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "811f6598-f603-4a15-8dec-add067d82d5c" (UID: "811f6598-f603-4a15-8dec-add067d82d5c"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.967832 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "811f6598-f603-4a15-8dec-add067d82d5c" (UID: "811f6598-f603-4a15-8dec-add067d82d5c"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.968095 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "811f6598-f603-4a15-8dec-add067d82d5c" (UID: "811f6598-f603-4a15-8dec-add067d82d5c"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.968282 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "811f6598-f603-4a15-8dec-add067d82d5c" (UID: "811f6598-f603-4a15-8dec-add067d82d5c"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.968636 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "811f6598-f603-4a15-8dec-add067d82d5c" (UID: "811f6598-f603-4a15-8dec-add067d82d5c"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.968918 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "811f6598-f603-4a15-8dec-add067d82d5c" (UID: "811f6598-f603-4a15-8dec-add067d82d5c"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.972069 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/811f6598-f603-4a15-8dec-add067d82d5c-kube-api-access-zxlfl" (OuterVolumeSpecName: "kube-api-access-zxlfl") pod "811f6598-f603-4a15-8dec-add067d82d5c" (UID: "811f6598-f603-4a15-8dec-add067d82d5c"). InnerVolumeSpecName "kube-api-access-zxlfl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.973022 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "811f6598-f603-4a15-8dec-add067d82d5c" (UID: "811f6598-f603-4a15-8dec-add067d82d5c"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 15:02:59 crc kubenswrapper[4840]: I1205 15:02:59.974070 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "811f6598-f603-4a15-8dec-add067d82d5c" (UID: "811f6598-f603-4a15-8dec-add067d82d5c"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 15:03:00 crc kubenswrapper[4840]: I1205 15:03:00.061489 4840 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\""
Dec 05 15:03:00 crc kubenswrapper[4840]: I1205 15:03:00.061531 4840 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/811f6598-f603-4a15-8dec-add067d82d5c-audit-policies\") on node \"crc\" DevicePath \"\""
Dec 05 15:03:00 crc kubenswrapper[4840]: I1205 15:03:00.061542 4840 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Dec 05 15:03:00 crc kubenswrapper[4840]: I1205 15:03:00.061555 4840 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\""
Dec 05 15:03:00 crc kubenswrapper[4840]: I1205 15:03:00.061565 4840 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\""
Dec 05 15:03:00 crc kubenswrapper[4840]: I1205 15:03:00.061573 4840 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/811f6598-f603-4a15-8dec-add067d82d5c-audit-dir\") on node \"crc\" DevicePath \"\""
Dec 05 15:03:00 crc kubenswrapper[4840]: I1205 15:03:00.061583 4840 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Dec 05 15:03:00 crc kubenswrapper[4840]: I1205 15:03:00.061593 4840 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 15:03:00 crc kubenswrapper[4840]: I1205 15:03:00.061602 4840 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-session\") on node \"crc\" DevicePath \"\""
Dec 05 15:03:00 crc kubenswrapper[4840]: I1205 15:03:00.061612 4840 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 15:03:00 crc kubenswrapper[4840]: I1205 15:03:00.061623 4840 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\""
Dec 05 15:03:00 crc kubenswrapper[4840]: I1205 15:03:00.061636 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxlfl\" (UniqueName: \"kubernetes.io/projected/811f6598-f603-4a15-8dec-add067d82d5c-kube-api-access-zxlfl\") on node \"crc\" DevicePath \"\""
Dec 05 15:03:00 crc kubenswrapper[4840]: I1205 15:03:00.061650 4840 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\""
Dec 05 15:03:00 crc kubenswrapper[4840]: I1205 15:03:00.061664 4840 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/811f6598-f603-4a15-8dec-add067d82d5c-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\""
Dec 05 15:03:00 crc kubenswrapper[4840]: I1205 15:03:00.340178 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-svf7z" event={"ID":"811f6598-f603-4a15-8dec-add067d82d5c","Type":"ContainerDied","Data":"22211d573c969e8c92188faeb74edac067d1c26f980ea7d6b6e3e262f1168451"}
Dec 05 15:03:00 crc kubenswrapper[4840]: I1205 15:03:00.340237 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-svf7z"
Dec 05 15:03:00 crc kubenswrapper[4840]: I1205 15:03:00.340251 4840 scope.go:117] "RemoveContainer" containerID="c3316cb2c121789e4cbf93384a78074f73f1c4e84d8e13cc2ec0dbb5ba22f90e"
Dec 05 15:03:00 crc kubenswrapper[4840]: I1205 15:03:00.343544 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"15c070630c255a81808efca46a513a5ce76cbae731b9567f1c43f079fab1ba8f"}
Dec 05 15:03:00 crc kubenswrapper[4840]: I1205 15:03:00.522336 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-scqls"
Dec 05 15:03:00 crc kubenswrapper[4840]: I1205 15:03:00.561701 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-scqls"
Dec 05 15:03:01 crc kubenswrapper[4840]: I1205 15:03:01.042899 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gwvqf"
Dec 05 15:03:01 crc kubenswrapper[4840]: I1205 15:03:01.089750 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gwvqf"
Dec 05 15:03:01 crc kubenswrapper[4840]: I1205 15:03:01.387437 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hdjqt"
Dec 05 15:03:01 crc kubenswrapper[4840]: I1205 15:03:01.428643 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hdjqt"
Dec 05 15:03:01 crc kubenswrapper[4840]: I1205 15:03:01.550019 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-sm6c5"
Dec 05 15:03:01 crc kubenswrapper[4840]: I1205 15:03:01.595960 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-sm6c5"
Dec 05 15:03:02 crc kubenswrapper[4840]: I1205 15:03:02.360082 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"27e90803fbf8f0f5bb265ebd555ca49f9103b615a2ee226fba2437b87ce47ce3"}
Dec 05 15:03:02 crc kubenswrapper[4840]: I1205 15:03:02.363129 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Dec 05 15:03:02 crc kubenswrapper[4840]: I1205 15:03:02.363316 4840 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f" exitCode=1
Dec 05 15:03:02 crc kubenswrapper[4840]: I1205 15:03:02.364160 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f"}
Dec 05 15:03:02 crc kubenswrapper[4840]: I1205 15:03:02.364571 4840 scope.go:117] "RemoveContainer" containerID="a694e350aea79f5955acd4dacaa3c63d447a5be9eff69bca107e56efd0b3a54f"
Dec 05 15:03:03 crc kubenswrapper[4840]: I1205 15:03:03.374571 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Dec 05 15:03:03 crc kubenswrapper[4840]: I1205 15:03:03.375021 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8b57412f4001c009d6093fa8ed456eb1109d34ed925aa986fb92ea9b18ed5793"}
Dec 05 15:03:03 crc kubenswrapper[4840]: I1205 15:03:03.381368 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"8659f43ae8bbbebf8f9c953333ff8dee6612325fed0e0d6c6875bd675d1bb007"}
Dec 05 15:03:03 crc kubenswrapper[4840]: I1205 15:03:03.381759 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 15:03:03 crc kubenswrapper[4840]: I1205 15:03:03.382034 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"f877cfa1f1c4996030f713b8361e4a36aa72c38b0023fc6158f80d25213f2b20"}
Dec 05 15:03:03 crc kubenswrapper[4840]: I1205 15:03:03.382244 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"602988fc0981f7d43a340bf36653fdc09caa9dfcf0d9a836e2da0dd719df6f11"}
Dec 05 15:03:03 crc kubenswrapper[4840]: I1205 15:03:03.381972 4840 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="810c38e3-e0d7-4b5c-9c12-5847a5b81a3d"
Dec 05 15:03:03 crc kubenswrapper[4840]: I1205 15:03:03.382615 4840 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="810c38e3-e0d7-4b5c-9c12-5847a5b81a3d"
Dec 05 15:03:03 crc kubenswrapper[4840]: I1205 15:03:03.390810 4840 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 15:03:03 crc kubenswrapper[4840]: I1205 15:03:03.492043 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 15:03:03 crc kubenswrapper[4840]: I1205 15:03:03.496751 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 15:03:04 crc kubenswrapper[4840]: I1205 15:03:04.386890 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 15:03:04 crc kubenswrapper[4840]: I1205 15:03:04.387282 4840 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="810c38e3-e0d7-4b5c-9c12-5847a5b81a3d"
Dec 05 15:03:04 crc kubenswrapper[4840]: I1205 15:03:04.388170 4840 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="810c38e3-e0d7-4b5c-9c12-5847a5b81a3d"
Dec 05 15:03:06 crc kubenswrapper[4840]: I1205 15:03:06.983954 4840 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="9087f90b-cbdb-4c9f-b924-064613e227ad"
Dec 05 15:03:14 crc kubenswrapper[4840]: I1205 15:03:14.965123 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Dec 05 15:03:17 crc kubenswrapper[4840]: I1205 15:03:17.035528 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Dec 05 15:03:17 crc kubenswrapper[4840]: I1205 15:03:17.495840 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Dec 05 15:03:17 crc kubenswrapper[4840]: I1205 15:03:17.639405 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 15:03:17 crc kubenswrapper[4840]: I1205 15:03:17.722951 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Dec 05 15:03:17 crc kubenswrapper[4840]: I1205 15:03:17.946608 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Dec 05 15:03:17 crc kubenswrapper[4840]: I1205 15:03:17.969282 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Dec 05 15:03:18 crc kubenswrapper[4840]: I1205 15:03:18.333509 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Dec 05 15:03:19 crc kubenswrapper[4840]: I1205 15:03:19.566508 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Dec 05 15:03:19 crc kubenswrapper[4840]: I1205 15:03:19.642623 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Dec 05 15:03:20 crc kubenswrapper[4840]: I1205 15:03:20.046356 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Dec 05 15:03:20 crc kubenswrapper[4840]: I1205 15:03:20.122822 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Dec 05 15:03:20 crc kubenswrapper[4840]: I1205 15:03:20.212345 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Dec 05 15:03:20 crc kubenswrapper[4840]: I1205 15:03:20.523513 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Dec 05 15:03:20 crc kubenswrapper[4840]: I1205 15:03:20.595714 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Dec 05 15:03:20 crc kubenswrapper[4840]: I1205 15:03:20.617498 4840 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 05 15:03:20 crc kubenswrapper[4840]: I1205 15:03:20.647921 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Dec 05 15:03:20 crc kubenswrapper[4840]: I1205 15:03:20.678034 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 05 15:03:21 crc kubenswrapper[4840]: I1205 15:03:21.121978 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Dec 05 15:03:21 crc kubenswrapper[4840]: I1205 15:03:21.192759 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Dec 05 15:03:21 crc kubenswrapper[4840]: I1205 15:03:21.215272 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Dec 05 15:03:21 crc kubenswrapper[4840]: I1205 15:03:21.297311 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Dec 05 15:03:21 crc kubenswrapper[4840]: I1205 15:03:21.325939 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Dec 05 15:03:21 crc kubenswrapper[4840]: I1205 15:03:21.390044 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Dec 05 15:03:21 crc kubenswrapper[4840]: I1205 15:03:21.397437 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Dec 05 15:03:21 crc kubenswrapper[4840]: I1205 15:03:21.686840 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Dec 05 15:03:21 crc kubenswrapper[4840]: I1205 15:03:21.750199 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Dec 05 15:03:21 crc kubenswrapper[4840]: I1205 15:03:21.789800 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Dec 05 15:03:21 crc kubenswrapper[4840]: I1205 15:03:21.806005 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Dec 05 15:03:21 crc kubenswrapper[4840]: I1205 15:03:21.865193 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Dec 05 15:03:21 crc kubenswrapper[4840]: I1205 15:03:21.865206 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 05 15:03:21 crc kubenswrapper[4840]: I1205 15:03:21.990589 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.041809 4840 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.089804 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.106198 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.184070 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.298035 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.327332 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.411384 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.469078 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.480804 4840 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.481233 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vgz4z" podStartSLOduration=35.670222949 podStartE2EDuration="2m14.481217234s" podCreationTimestamp="2025-12-05 15:01:08 +0000 UTC" firstStartedPulling="2025-12-05 15:01:10.763729206 +0000 UTC m=+149.104791820" lastFinishedPulling="2025-12-05 15:02:49.574723471 +0000 UTC m=+247.915786105" observedRunningTime="2025-12-05 15:03:06.764535196 +0000 UTC m=+265.105597810" watchObservedRunningTime="2025-12-05 15:03:22.481217234 +0000 UTC m=+280.822279858"
Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.481368 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-scqls" podStartSLOduration=37.138770235 podStartE2EDuration="2m12.481361509s" podCreationTimestamp="2025-12-05 15:01:10 +0000 UTC" firstStartedPulling="2025-12-05 15:01:14.255815616 +0000 UTC m=+152.596878230" lastFinishedPulling="2025-12-05 15:02:49.59840689 +0000 UTC m=+247.939469504" observedRunningTime="2025-12-05 15:03:06.747607859 +0000 UTC m=+265.088670473" watchObservedRunningTime="2025-12-05 15:03:22.481361509 +0000 UTC m=+280.822424133"
Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.481458 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-sm6c5" podStartSLOduration=35.066518277 podStartE2EDuration="2m11.481453131s" podCreationTimestamp="2025-12-05 15:01:11 +0000 UTC" firstStartedPulling="2025-12-05 15:01:13.137692654 +0000 UTC m=+151.478755278" lastFinishedPulling="2025-12-05
15:02:49.552627518 +0000 UTC m=+247.893690132" observedRunningTime="2025-12-05 15:03:06.869195448 +0000 UTC m=+265.210258062" watchObservedRunningTime="2025-12-05 15:03:22.481453131 +0000 UTC m=+280.822515745" Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.482188 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-q9jjl" podStartSLOduration=36.647366752 podStartE2EDuration="2m15.482182723s" podCreationTimestamp="2025-12-05 15:01:07 +0000 UTC" firstStartedPulling="2025-12-05 15:01:10.716122439 +0000 UTC m=+149.057185053" lastFinishedPulling="2025-12-05 15:02:49.55093842 +0000 UTC m=+247.892001024" observedRunningTime="2025-12-05 15:03:06.83825271 +0000 UTC m=+265.179315324" watchObservedRunningTime="2025-12-05 15:03:22.482182723 +0000 UTC m=+280.823245337" Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.482650 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=35.482645386 podStartE2EDuration="35.482645386s" podCreationTimestamp="2025-12-05 15:02:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:03:06.776312092 +0000 UTC m=+265.117374706" watchObservedRunningTime="2025-12-05 15:03:22.482645386 +0000 UTC m=+280.823708000" Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.482751 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2z4bf" podStartSLOduration=38.539393561 podStartE2EDuration="2m15.482746699s" podCreationTimestamp="2025-12-05 15:01:07 +0000 UTC" firstStartedPulling="2025-12-05 15:01:10.742127725 +0000 UTC m=+149.083190339" lastFinishedPulling="2025-12-05 15:02:47.685480863 +0000 UTC m=+246.026543477" observedRunningTime="2025-12-05 15:03:06.727019505 +0000 UTC m=+265.068082139" watchObservedRunningTime="2025-12-05 15:03:22.482746699 +0000 UTC m=+280.823809313" Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.483846 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xc72x" podStartSLOduration=36.066948456 podStartE2EDuration="2m14.483839701s" podCreationTimestamp="2025-12-05 15:01:08 +0000 UTC" firstStartedPulling="2025-12-05 15:01:10.797093509 +0000 UTC m=+149.138156123" lastFinishedPulling="2025-12-05 15:02:49.213984754 +0000 UTC m=+247.555047368" observedRunningTime="2025-12-05 15:03:06.792567829 +0000 UTC m=+265.133630433" watchObservedRunningTime="2025-12-05 15:03:22.483839701 +0000 UTC m=+280.824902315" Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.484976 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hdjqt" podStartSLOduration=37.154029409 podStartE2EDuration="2m12.484968655s" podCreationTimestamp="2025-12-05 15:01:10 +0000 UTC" firstStartedPulling="2025-12-05 15:01:14.243353763 +0000 UTC m=+152.584416377" lastFinishedPulling="2025-12-05 15:02:49.574293009 +0000 UTC m=+247.915355623" observedRunningTime="2025-12-05 15:03:06.819053726 +0000 UTC m=+265.160116350" watchObservedRunningTime="2025-12-05 15:03:22.484968655 +0000 UTC m=+280.826031279" Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.485285 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gwvqf" 
podStartSLOduration=36.07176904 podStartE2EDuration="2m12.485280994s" podCreationTimestamp="2025-12-05 15:01:10 +0000 UTC" firstStartedPulling="2025-12-05 15:01:13.139073453 +0000 UTC m=+151.480136067" lastFinishedPulling="2025-12-05 15:02:49.552585407 +0000 UTC m=+247.893648021" observedRunningTime="2025-12-05 15:03:06.852542679 +0000 UTC m=+265.193605293" watchObservedRunningTime="2025-12-05 15:03:22.485280994 +0000 UTC m=+280.826343608" Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.486154 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-svf7z","openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.486207 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.492861 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.518243 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.527738 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=19.527723299 podStartE2EDuration="19.527723299s" podCreationTimestamp="2025-12-05 15:03:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:03:22.509271758 +0000 UTC m=+280.850334372" watchObservedRunningTime="2025-12-05 15:03:22.527723299 +0000 UTC m=+280.868785913" Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.555046 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.786032 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.866017 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.874954 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.903024 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.958892 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 05 15:03:22 crc kubenswrapper[4840]: I1205 15:03:22.978746 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 05 15:03:23 crc kubenswrapper[4840]: I1205 15:03:23.001002 4840 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 05 15:03:23 crc kubenswrapper[4840]: I1205 15:03:23.034072 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 15:03:23 crc kubenswrapper[4840]: I1205 15:03:23.037718 4840 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 05 15:03:23 crc kubenswrapper[4840]: I1205 15:03:23.075000 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 05 15:03:23 crc kubenswrapper[4840]: I1205 15:03:23.089351 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 15:03:23 crc kubenswrapper[4840]: I1205 15:03:23.089653 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 15:03:23 crc kubenswrapper[4840]: I1205 15:03:23.101940 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 15:03:23 crc kubenswrapper[4840]: I1205 15:03:23.166508 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 05 15:03:23 crc kubenswrapper[4840]: I1205 15:03:23.295565 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 05 15:03:23 crc kubenswrapper[4840]: I1205 15:03:23.314041 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 05 15:03:23 crc kubenswrapper[4840]: I1205 15:03:23.338512 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 15:03:23 crc kubenswrapper[4840]: I1205 15:03:23.454321 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 05 15:03:23 crc kubenswrapper[4840]: I1205 15:03:23.504722 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 15:03:23 crc kubenswrapper[4840]: I1205 15:03:23.520451 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 05 15:03:23 crc kubenswrapper[4840]: I1205 15:03:23.620163 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 05 15:03:23 crc kubenswrapper[4840]: I1205 15:03:23.630601 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 05 15:03:23 crc kubenswrapper[4840]: I1205 15:03:23.683182 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 05 15:03:23 crc kubenswrapper[4840]: I1205 15:03:23.695079 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 05 15:03:23 crc kubenswrapper[4840]: I1205 15:03:23.740968 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 05 15:03:23 crc kubenswrapper[4840]: I1205 15:03:23.829630 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 05 15:03:23 crc kubenswrapper[4840]: I1205 15:03:23.831166 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 05 15:03:23 crc 
kubenswrapper[4840]: I1205 15:03:23.837873 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 05 15:03:23 crc kubenswrapper[4840]: I1205 15:03:23.881823 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 05 15:03:23 crc kubenswrapper[4840]: I1205 15:03:23.920286 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 05 15:03:23 crc kubenswrapper[4840]: I1205 15:03:23.956298 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 15:03:24 crc kubenswrapper[4840]: I1205 15:03:24.078315 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="811f6598-f603-4a15-8dec-add067d82d5c" path="/var/lib/kubelet/pods/811f6598-f603-4a15-8dec-add067d82d5c/volumes" Dec 05 15:03:24 crc kubenswrapper[4840]: I1205 15:03:24.138909 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 05 15:03:24 crc kubenswrapper[4840]: I1205 15:03:24.166302 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 05 15:03:24 crc kubenswrapper[4840]: I1205 15:03:24.222310 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 05 15:03:24 crc kubenswrapper[4840]: I1205 15:03:24.268680 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 05 15:03:24 crc kubenswrapper[4840]: I1205 15:03:24.425507 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 05 15:03:24 crc kubenswrapper[4840]: I1205 15:03:24.431328 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 05 15:03:24 crc kubenswrapper[4840]: I1205 15:03:24.574298 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 05 15:03:24 crc kubenswrapper[4840]: I1205 15:03:24.600528 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 05 15:03:24 crc kubenswrapper[4840]: I1205 15:03:24.616085 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 05 15:03:24 crc kubenswrapper[4840]: I1205 15:03:24.925578 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 05 15:03:24 crc kubenswrapper[4840]: I1205 15:03:24.958361 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 15:03:24 crc kubenswrapper[4840]: I1205 15:03:24.981780 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 05 15:03:24 crc kubenswrapper[4840]: I1205 15:03:24.999546 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.085213 4840 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ingress"/"router-metrics-certs-default" Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.151603 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.157995 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.232471 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.310028 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.336187 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.382515 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.443101 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.443952 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.466480 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.504823 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.508157 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.573524 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.628350 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.643487 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.648641 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.680901 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.681014 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.714592 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.740515 4840 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.750850 4840 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.763471 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.766816 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.843158 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 15:03:25 crc kubenswrapper[4840]: I1205 15:03:25.910235 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 05 15:03:26 crc kubenswrapper[4840]: I1205 15:03:26.058395 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 05 15:03:26 crc kubenswrapper[4840]: I1205 15:03:26.315413 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 05 15:03:26 crc kubenswrapper[4840]: I1205 15:03:26.333606 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 15:03:26 crc kubenswrapper[4840]: I1205 15:03:26.385189 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 05 15:03:26 crc kubenswrapper[4840]: I1205 15:03:26.461025 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 15:03:26 crc kubenswrapper[4840]: I1205 15:03:26.596935 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 05 15:03:26 crc kubenswrapper[4840]: I1205 15:03:26.604771 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 05 15:03:26 crc kubenswrapper[4840]: I1205 15:03:26.604937 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 15:03:26 crc kubenswrapper[4840]: I1205 15:03:26.638656 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 05 15:03:26 crc kubenswrapper[4840]: I1205 15:03:26.785632 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 05 15:03:26 crc kubenswrapper[4840]: I1205 15:03:26.787535 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 05 15:03:26 crc kubenswrapper[4840]: I1205 15:03:26.789687 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 05 15:03:26 crc kubenswrapper[4840]: I1205 15:03:26.810577 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 15:03:26 crc kubenswrapper[4840]: I1205 15:03:26.983039 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 05 15:03:27 
crc kubenswrapper[4840]: I1205 15:03:27.027720 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 05 15:03:27 crc kubenswrapper[4840]: I1205 15:03:27.031043 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 05 15:03:27 crc kubenswrapper[4840]: I1205 15:03:27.106757 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 05 15:03:27 crc kubenswrapper[4840]: I1205 15:03:27.106768 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 05 15:03:27 crc kubenswrapper[4840]: I1205 15:03:27.108735 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 05 15:03:27 crc kubenswrapper[4840]: I1205 15:03:27.110346 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 05 15:03:27 crc kubenswrapper[4840]: I1205 15:03:27.153966 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 05 15:03:27 crc kubenswrapper[4840]: I1205 15:03:27.174411 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 05 15:03:27 crc kubenswrapper[4840]: I1205 15:03:27.218662 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 15:03:27 crc kubenswrapper[4840]: I1205 15:03:27.229379 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 05 15:03:27 crc kubenswrapper[4840]: I1205 15:03:27.270399 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 05 15:03:27 crc kubenswrapper[4840]: I1205 15:03:27.288733 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 05 15:03:27 crc kubenswrapper[4840]: I1205 15:03:27.293112 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 05 15:03:27 crc kubenswrapper[4840]: I1205 15:03:27.304210 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 05 15:03:27 crc kubenswrapper[4840]: I1205 15:03:27.314129 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 05 15:03:27 crc kubenswrapper[4840]: I1205 15:03:27.340938 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 05 15:03:27 crc kubenswrapper[4840]: I1205 15:03:27.350032 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 05 15:03:27 crc kubenswrapper[4840]: I1205 15:03:27.356710 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 05 15:03:27 crc kubenswrapper[4840]: I1205 15:03:27.573802 4840 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 05 15:03:27 crc kubenswrapper[4840]: I1205 15:03:27.750089 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 05 15:03:27 crc kubenswrapper[4840]: I1205 15:03:27.773949 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 05 15:03:27 crc kubenswrapper[4840]: I1205 15:03:27.821770 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 05 15:03:27 crc kubenswrapper[4840]: I1205 15:03:27.880922 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 15:03:27 crc kubenswrapper[4840]: I1205 15:03:27.883794 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 05 15:03:27 crc kubenswrapper[4840]: I1205 15:03:27.915676 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 05 15:03:28 crc kubenswrapper[4840]: I1205 15:03:28.081555 4840 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 15:03:28 crc kubenswrapper[4840]: I1205 15:03:28.081779 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://c5650ce023caa08166097767375f9e59662a9c47edc9ac7ff6a5f7bc9136b7a9" gracePeriod=5 Dec 05 15:03:28 crc kubenswrapper[4840]: I1205 15:03:28.180877 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 05 15:03:28 crc kubenswrapper[4840]: I1205 15:03:28.213209 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 15:03:28 crc kubenswrapper[4840]: I1205 15:03:28.298020 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 05 15:03:28 crc kubenswrapper[4840]: I1205 15:03:28.298841 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 05 15:03:28 crc kubenswrapper[4840]: I1205 15:03:28.469261 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 05 15:03:28 crc kubenswrapper[4840]: I1205 15:03:28.559358 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 05 15:03:28 crc kubenswrapper[4840]: I1205 15:03:28.585389 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 05 15:03:28 crc kubenswrapper[4840]: I1205 15:03:28.731002 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 05 15:03:28 crc kubenswrapper[4840]: I1205 15:03:28.855987 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 05 15:03:28 crc kubenswrapper[4840]: I1205 15:03:28.858930 4840 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 05 15:03:28 crc kubenswrapper[4840]: I1205 15:03:28.957162 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.086758 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-79945c7d7f-bhfnr"] Dec 05 15:03:29 crc kubenswrapper[4840]: E1205 15:03:29.087321 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb5b7dc0-6b27-4994-9eb7-db65746b4f97" containerName="installer" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.087405 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb5b7dc0-6b27-4994-9eb7-db65746b4f97" containerName="installer" Dec 05 15:03:29 crc kubenswrapper[4840]: E1205 15:03:29.087483 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="811f6598-f603-4a15-8dec-add067d82d5c" containerName="oauth-openshift" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.087552 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="811f6598-f603-4a15-8dec-add067d82d5c" containerName="oauth-openshift" Dec 05 15:03:29 crc kubenswrapper[4840]: E1205 15:03:29.087647 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.087707 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.087887 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="811f6598-f603-4a15-8dec-add067d82d5c" containerName="oauth-openshift" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.087973 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb5b7dc0-6b27-4994-9eb7-db65746b4f97" containerName="installer" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.088038 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.088442 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.096696 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.097256 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.097654 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.097795 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.098225 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.098395 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.098415 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.098462 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.098412 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.098470 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.101525 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.101577 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.108076 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.108262 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.144632 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.152677 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.163584 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-system-session\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " 
pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.163647 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9460f6ab-fe57-49c4-bac5-da9336fc741e-audit-dir\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.163752 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-user-template-login\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.163835 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.163892 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-system-service-ca\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.163961 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.163984 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7tdz\" (UniqueName: \"kubernetes.io/projected/9460f6ab-fe57-49c4-bac5-da9336fc741e-kube-api-access-j7tdz\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.164014 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-system-router-certs\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.164031 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.164048 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.164066 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.164093 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.164114 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9460f6ab-fe57-49c4-bac5-da9336fc741e-audit-policies\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.164205 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-user-template-error\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.173414 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.265489 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.265532 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-system-service-ca\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: 
\"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.265551 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.265571 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7tdz\" (UniqueName: \"kubernetes.io/projected/9460f6ab-fe57-49c4-bac5-da9336fc741e-kube-api-access-j7tdz\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.265597 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-system-router-certs\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.265619 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.265638 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.265654 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.265674 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.265691 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9460f6ab-fe57-49c4-bac5-da9336fc741e-audit-policies\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: 
\"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.265726 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-user-template-error\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.265747 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-system-session\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.265764 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9460f6ab-fe57-49c4-bac5-da9336fc741e-audit-dir\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.265782 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-user-template-login\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.267157 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.267240 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9460f6ab-fe57-49c4-bac5-da9336fc741e-audit-dir\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.267851 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-system-service-ca\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.268496 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9460f6ab-fe57-49c4-bac5-da9336fc741e-audit-policies\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: 
I1205 15:03:29.269025 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.271275 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-system-session\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.271299 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-user-template-login\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.271707 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.272138 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.272157 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.272412 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-user-template-error\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.272787 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.287561 4840 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9460f6ab-fe57-49c4-bac5-da9336fc741e-v4-0-config-system-router-certs\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.295689 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7tdz\" (UniqueName: \"kubernetes.io/projected/9460f6ab-fe57-49c4-bac5-da9336fc741e-kube-api-access-j7tdz\") pod \"oauth-openshift-79945c7d7f-bhfnr\" (UID: \"9460f6ab-fe57-49c4-bac5-da9336fc741e\") " pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.405938 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.410545 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.416080 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.441004 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.451855 4840 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.584044 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.601036 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.610077 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.677478 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.759771 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.800246 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.869669 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.875178 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.931411 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.966379 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 05 15:03:29 crc 
kubenswrapper[4840]: I1205 15:03:29.981185 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 05 15:03:29 crc kubenswrapper[4840]: I1205 15:03:29.984247 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.021924 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-79945c7d7f-bhfnr"] Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.041390 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.063393 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.104289 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.104889 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.187183 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.221257 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.224438 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.245801 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.288560 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.369616 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.377222 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.442522 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.450328 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.518668 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.529439 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.544856 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 
05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.556196 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.607209 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.779014 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.844003 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.902153 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.908466 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.942429 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.957784 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 05 15:03:30 crc kubenswrapper[4840]: I1205 15:03:30.963396 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 05 15:03:31 crc kubenswrapper[4840]: I1205 15:03:31.057522 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 05 15:03:31 crc kubenswrapper[4840]: I1205 15:03:31.124834 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 05 15:03:31 crc kubenswrapper[4840]: I1205 15:03:31.264016 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 05 15:03:31 crc kubenswrapper[4840]: I1205 15:03:31.335659 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 05 15:03:31 crc kubenswrapper[4840]: I1205 15:03:31.397954 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 05 15:03:31 crc kubenswrapper[4840]: I1205 15:03:31.444251 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 05 15:03:31 crc kubenswrapper[4840]: I1205 15:03:31.445386 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 05 15:03:31 crc kubenswrapper[4840]: I1205 15:03:31.454714 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 05 15:03:31 crc kubenswrapper[4840]: I1205 15:03:31.480442 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 15:03:31 crc kubenswrapper[4840]: I1205 15:03:31.642777 4840 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 05 15:03:31 crc kubenswrapper[4840]: I1205 15:03:31.754549 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 05 15:03:31 crc kubenswrapper[4840]: I1205 15:03:31.883746 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 05 15:03:31 crc kubenswrapper[4840]: I1205 15:03:31.916561 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 05 15:03:31 crc kubenswrapper[4840]: I1205 15:03:31.925472 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 05 15:03:32 crc kubenswrapper[4840]: I1205 15:03:32.377955 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 05 15:03:32 crc kubenswrapper[4840]: I1205 15:03:32.437248 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 05 15:03:32 crc kubenswrapper[4840]: I1205 15:03:32.517546 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 05 15:03:32 crc kubenswrapper[4840]: I1205 15:03:32.593320 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 05 15:03:32 crc kubenswrapper[4840]: I1205 15:03:32.623595 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 05 15:03:32 crc kubenswrapper[4840]: I1205 15:03:32.646948 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-79945c7d7f-bhfnr"] Dec 05 15:03:32 crc kubenswrapper[4840]: I1205 15:03:32.703193 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 05 15:03:32 crc kubenswrapper[4840]: I1205 15:03:32.945217 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 15:03:33 crc kubenswrapper[4840]: I1205 15:03:33.004757 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 05 15:03:33 crc kubenswrapper[4840]: I1205 15:03:33.059392 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 05 15:03:33 crc kubenswrapper[4840]: I1205 15:03:33.197814 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 05 15:03:33 crc kubenswrapper[4840]: I1205 15:03:33.198939 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 05 15:03:33 crc kubenswrapper[4840]: I1205 15:03:33.530636 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 05 15:03:33 crc kubenswrapper[4840]: I1205 15:03:33.551043 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" 
event={"ID":"9460f6ab-fe57-49c4-bac5-da9336fc741e","Type":"ContainerStarted","Data":"e03d17f63e8123c5e4134c810534322706c83fe850a44516eb6087041750e9ab"} Dec 05 15:03:33 crc kubenswrapper[4840]: I1205 15:03:33.616088 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 15:03:33 crc kubenswrapper[4840]: I1205 15:03:33.674679 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 05 15:03:33 crc kubenswrapper[4840]: I1205 15:03:33.781133 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.006076 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.389603 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.425806 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.541643 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.558122 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" event={"ID":"9460f6ab-fe57-49c4-bac5-da9336fc741e","Type":"ContainerStarted","Data":"1f5a0ce96800f517a5aed6d6c7e2a6d76055e92ae1524a902fbafef369e3f7a7"} Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.558568 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.561478 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.561535 4840 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="c5650ce023caa08166097767375f9e59662a9c47edc9ac7ff6a5f7bc9136b7a9" exitCode=137 Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.564938 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.585282 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-79945c7d7f-bhfnr" podStartSLOduration=64.585255051 podStartE2EDuration="1m4.585255051s" podCreationTimestamp="2025-12-05 15:02:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:03:34.581557793 +0000 UTC m=+292.922620407" watchObservedRunningTime="2025-12-05 15:03:34.585255051 +0000 UTC m=+292.926317665" Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.662549 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 15:03:34 crc 
kubenswrapper[4840]: I1205 15:03:34.663212 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.794167 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.794238 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.794278 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.794341 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.794398 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.794828 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.794891 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.794910 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.794933 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.805233 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.881764 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.896140 4840 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.896246 4840 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.896261 4840 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.896271 4840 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 05 15:03:34 crc kubenswrapper[4840]: I1205 15:03:34.896281 4840 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 15:03:35 crc kubenswrapper[4840]: I1205 15:03:35.266722 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 05 15:03:35 crc kubenswrapper[4840]: I1205 15:03:35.567935 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 15:03:35 crc kubenswrapper[4840]: I1205 15:03:35.568072 4840 scope.go:117] "RemoveContainer" containerID="c5650ce023caa08166097767375f9e59662a9c47edc9ac7ff6a5f7bc9136b7a9" Dec 05 15:03:35 crc kubenswrapper[4840]: I1205 15:03:35.568073 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 15:03:35 crc kubenswrapper[4840]: I1205 15:03:35.986585 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 05 15:03:36 crc kubenswrapper[4840]: I1205 15:03:36.079095 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 05 15:03:36 crc kubenswrapper[4840]: I1205 15:03:36.079644 4840 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Dec 05 15:03:36 crc kubenswrapper[4840]: I1205 15:03:36.091203 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 15:03:36 crc kubenswrapper[4840]: I1205 15:03:36.091238 4840 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="e224b3b1-69b9-41cc-b3cb-f54848e54f4a" Dec 05 15:03:36 crc kubenswrapper[4840]: I1205 15:03:36.094744 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 15:03:36 crc kubenswrapper[4840]: I1205 15:03:36.094789 4840 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="e224b3b1-69b9-41cc-b3cb-f54848e54f4a" Dec 05 15:03:36 crc kubenswrapper[4840]: I1205 15:03:36.129034 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.232277 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-8t4w9"] Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.233260 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9" podUID="6e933683-f464-453c-8228-97b31d8b1f42" containerName="controller-manager" containerID="cri-o://d55dd44445405799a32bb1f5b512dce4446cbd51a36278092e08f5c59aeee75d" gracePeriod=30 Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.342958 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv"] Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.343436 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv" podUID="6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6" containerName="route-controller-manager" containerID="cri-o://9cef40518ee075e6cb6da9e6b06220abfe6e6a4faa8b12bce2460a5488c390ac" gracePeriod=30 Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.670793 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9" Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.683518 4840 generic.go:334] "Generic (PLEG): container finished" podID="6e933683-f464-453c-8228-97b31d8b1f42" containerID="d55dd44445405799a32bb1f5b512dce4446cbd51a36278092e08f5c59aeee75d" exitCode=0 Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.683591 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9" event={"ID":"6e933683-f464-453c-8228-97b31d8b1f42","Type":"ContainerDied","Data":"d55dd44445405799a32bb1f5b512dce4446cbd51a36278092e08f5c59aeee75d"} Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.683622 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9" event={"ID":"6e933683-f464-453c-8228-97b31d8b1f42","Type":"ContainerDied","Data":"3346da2f7595200e63558e8a3ac3cf474a9af10b349135903a722f4f4d9083ef"} Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.683641 4840 scope.go:117] "RemoveContainer" containerID="d55dd44445405799a32bb1f5b512dce4446cbd51a36278092e08f5c59aeee75d" Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.683744 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-8t4w9" Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.688704 4840 generic.go:334] "Generic (PLEG): container finished" podID="6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6" containerID="9cef40518ee075e6cb6da9e6b06220abfe6e6a4faa8b12bce2460a5488c390ac" exitCode=0 Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.688749 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv" event={"ID":"6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6","Type":"ContainerDied","Data":"9cef40518ee075e6cb6da9e6b06220abfe6e6a4faa8b12bce2460a5488c390ac"} Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.714114 4840 scope.go:117] "RemoveContainer" containerID="d55dd44445405799a32bb1f5b512dce4446cbd51a36278092e08f5c59aeee75d" Dec 05 15:03:55 crc kubenswrapper[4840]: E1205 15:03:55.714779 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d55dd44445405799a32bb1f5b512dce4446cbd51a36278092e08f5c59aeee75d\": container with ID starting with d55dd44445405799a32bb1f5b512dce4446cbd51a36278092e08f5c59aeee75d not found: ID does not exist" containerID="d55dd44445405799a32bb1f5b512dce4446cbd51a36278092e08f5c59aeee75d" Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.714827 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d55dd44445405799a32bb1f5b512dce4446cbd51a36278092e08f5c59aeee75d"} err="failed to get container status \"d55dd44445405799a32bb1f5b512dce4446cbd51a36278092e08f5c59aeee75d\": rpc error: code = NotFound desc = could not find container \"d55dd44445405799a32bb1f5b512dce4446cbd51a36278092e08f5c59aeee75d\": container with ID starting with d55dd44445405799a32bb1f5b512dce4446cbd51a36278092e08f5c59aeee75d not found: ID does not exist" Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.748350 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv" Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.816631 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6-serving-cert\") pod \"6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6\" (UID: \"6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6\") " Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.816692 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6e933683-f464-453c-8228-97b31d8b1f42-client-ca\") pod \"6e933683-f464-453c-8228-97b31d8b1f42\" (UID: \"6e933683-f464-453c-8228-97b31d8b1f42\") " Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.816736 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6e933683-f464-453c-8228-97b31d8b1f42-proxy-ca-bundles\") pod \"6e933683-f464-453c-8228-97b31d8b1f42\" (UID: \"6e933683-f464-453c-8228-97b31d8b1f42\") " Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.816773 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvb2t\" (UniqueName: \"kubernetes.io/projected/6e933683-f464-453c-8228-97b31d8b1f42-kube-api-access-mvb2t\") pod \"6e933683-f464-453c-8228-97b31d8b1f42\" (UID: \"6e933683-f464-453c-8228-97b31d8b1f42\") " Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.816812 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6-config\") pod \"6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6\" (UID: \"6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6\") " Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.816839 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e933683-f464-453c-8228-97b31d8b1f42-serving-cert\") pod \"6e933683-f464-453c-8228-97b31d8b1f42\" (UID: \"6e933683-f464-453c-8228-97b31d8b1f42\") " Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.816910 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e933683-f464-453c-8228-97b31d8b1f42-config\") pod \"6e933683-f464-453c-8228-97b31d8b1f42\" (UID: \"6e933683-f464-453c-8228-97b31d8b1f42\") " Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.816940 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6-client-ca\") pod \"6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6\" (UID: \"6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6\") " Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.816971 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qc27f\" (UniqueName: \"kubernetes.io/projected/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6-kube-api-access-qc27f\") pod \"6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6\" (UID: \"6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6\") " Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.818973 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6-client-ca" (OuterVolumeSpecName: "client-ca") pod "6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6" 
(UID: "6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.819550 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e933683-f464-453c-8228-97b31d8b1f42-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "6e933683-f464-453c-8228-97b31d8b1f42" (UID: "6e933683-f464-453c-8228-97b31d8b1f42"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.819577 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6-config" (OuterVolumeSpecName: "config") pod "6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6" (UID: "6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.819816 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e933683-f464-453c-8228-97b31d8b1f42-client-ca" (OuterVolumeSpecName: "client-ca") pod "6e933683-f464-453c-8228-97b31d8b1f42" (UID: "6e933683-f464-453c-8228-97b31d8b1f42"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.820066 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e933683-f464-453c-8228-97b31d8b1f42-config" (OuterVolumeSpecName: "config") pod "6e933683-f464-453c-8228-97b31d8b1f42" (UID: "6e933683-f464-453c-8228-97b31d8b1f42"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.824791 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e933683-f464-453c-8228-97b31d8b1f42-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6e933683-f464-453c-8228-97b31d8b1f42" (UID: "6e933683-f464-453c-8228-97b31d8b1f42"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.824847 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6" (UID: "6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.825044 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6-kube-api-access-qc27f" (OuterVolumeSpecName: "kube-api-access-qc27f") pod "6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6" (UID: "6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6"). InnerVolumeSpecName "kube-api-access-qc27f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.825335 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e933683-f464-453c-8228-97b31d8b1f42-kube-api-access-mvb2t" (OuterVolumeSpecName: "kube-api-access-mvb2t") pod "6e933683-f464-453c-8228-97b31d8b1f42" (UID: "6e933683-f464-453c-8228-97b31d8b1f42"). 
InnerVolumeSpecName "kube-api-access-mvb2t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.917875 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e933683-f464-453c-8228-97b31d8b1f42-config\") on node \"crc\" DevicePath \"\"" Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.917911 4840 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.917923 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qc27f\" (UniqueName: \"kubernetes.io/projected/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6-kube-api-access-qc27f\") on node \"crc\" DevicePath \"\"" Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.917932 4840 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.917939 4840 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6e933683-f464-453c-8228-97b31d8b1f42-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.917947 4840 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6e933683-f464-453c-8228-97b31d8b1f42-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.917955 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvb2t\" (UniqueName: \"kubernetes.io/projected/6e933683-f464-453c-8228-97b31d8b1f42-kube-api-access-mvb2t\") on node \"crc\" DevicePath \"\"" Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.917962 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6-config\") on node \"crc\" DevicePath \"\"" Dec 05 15:03:55 crc kubenswrapper[4840]: I1205 15:03:55.917970 4840 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e933683-f464-453c-8228-97b31d8b1f42-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.013676 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-8t4w9"] Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.018718 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-8t4w9"] Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.072546 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e933683-f464-453c-8228-97b31d8b1f42" path="/var/lib/kubelet/pods/6e933683-f464-453c-8228-97b31d8b1f42/volumes" Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.696265 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv" event={"ID":"6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6","Type":"ContainerDied","Data":"495e9d331ad54bd3b6771eb91800429cc75360d347d2ab28f56077afcf74bada"} Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.696550 4840 scope.go:117] 
"RemoveContainer" containerID="9cef40518ee075e6cb6da9e6b06220abfe6e6a4faa8b12bce2460a5488c390ac" Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.696327 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv" Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.714319 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv"] Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.718602 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-tq2cv"] Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.963324 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b8649b98-5sdpb"] Dec 05 15:03:56 crc kubenswrapper[4840]: E1205 15:03:56.963592 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e933683-f464-453c-8228-97b31d8b1f42" containerName="controller-manager" Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.963606 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e933683-f464-453c-8228-97b31d8b1f42" containerName="controller-manager" Dec 05 15:03:56 crc kubenswrapper[4840]: E1205 15:03:56.963621 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6" containerName="route-controller-manager" Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.963627 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6" containerName="route-controller-manager" Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.963726 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6" containerName="route-controller-manager" Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.963738 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e933683-f464-453c-8228-97b31d8b1f42" containerName="controller-manager" Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.964127 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b8649b98-5sdpb" Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.967408 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.967744 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.967953 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.968116 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.968262 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.969281 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-58984d85d9-vkxzv"] Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.970209 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.970937 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.972570 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.972830 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.973256 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.973385 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b8649b98-5sdpb"] Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.973442 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.974148 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.975310 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.978209 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-58984d85d9-vkxzv"] Dec 05 15:03:56 crc kubenswrapper[4840]: I1205 15:03:56.982584 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.130297 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/6d406364-372a-4bf3-8115-308facc3eb0c-client-ca\") pod \"route-controller-manager-7b8649b98-5sdpb\" (UID: \"6d406364-372a-4bf3-8115-308facc3eb0c\") " pod="openshift-route-controller-manager/route-controller-manager-7b8649b98-5sdpb" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.130335 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/422f9259-28be-4590-adf5-709666a3cd4f-client-ca\") pod \"controller-manager-58984d85d9-vkxzv\" (UID: \"422f9259-28be-4590-adf5-709666a3cd4f\") " pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.130369 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/422f9259-28be-4590-adf5-709666a3cd4f-proxy-ca-bundles\") pod \"controller-manager-58984d85d9-vkxzv\" (UID: \"422f9259-28be-4590-adf5-709666a3cd4f\") " pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.130395 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hltmf\" (UniqueName: \"kubernetes.io/projected/422f9259-28be-4590-adf5-709666a3cd4f-kube-api-access-hltmf\") pod \"controller-manager-58984d85d9-vkxzv\" (UID: \"422f9259-28be-4590-adf5-709666a3cd4f\") " pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.130907 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d406364-372a-4bf3-8115-308facc3eb0c-serving-cert\") pod \"route-controller-manager-7b8649b98-5sdpb\" (UID: \"6d406364-372a-4bf3-8115-308facc3eb0c\") " pod="openshift-route-controller-manager/route-controller-manager-7b8649b98-5sdpb" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.131005 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npgrc\" (UniqueName: \"kubernetes.io/projected/6d406364-372a-4bf3-8115-308facc3eb0c-kube-api-access-npgrc\") pod \"route-controller-manager-7b8649b98-5sdpb\" (UID: \"6d406364-372a-4bf3-8115-308facc3eb0c\") " pod="openshift-route-controller-manager/route-controller-manager-7b8649b98-5sdpb" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.131068 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d406364-372a-4bf3-8115-308facc3eb0c-config\") pod \"route-controller-manager-7b8649b98-5sdpb\" (UID: \"6d406364-372a-4bf3-8115-308facc3eb0c\") " pod="openshift-route-controller-manager/route-controller-manager-7b8649b98-5sdpb" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.131098 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/422f9259-28be-4590-adf5-709666a3cd4f-config\") pod \"controller-manager-58984d85d9-vkxzv\" (UID: \"422f9259-28be-4590-adf5-709666a3cd4f\") " pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.131148 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/422f9259-28be-4590-adf5-709666a3cd4f-serving-cert\") pod \"controller-manager-58984d85d9-vkxzv\" (UID: \"422f9259-28be-4590-adf5-709666a3cd4f\") " pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.232015 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npgrc\" (UniqueName: \"kubernetes.io/projected/6d406364-372a-4bf3-8115-308facc3eb0c-kube-api-access-npgrc\") pod \"route-controller-manager-7b8649b98-5sdpb\" (UID: \"6d406364-372a-4bf3-8115-308facc3eb0c\") " pod="openshift-route-controller-manager/route-controller-manager-7b8649b98-5sdpb" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.232067 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d406364-372a-4bf3-8115-308facc3eb0c-config\") pod \"route-controller-manager-7b8649b98-5sdpb\" (UID: \"6d406364-372a-4bf3-8115-308facc3eb0c\") " pod="openshift-route-controller-manager/route-controller-manager-7b8649b98-5sdpb" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.232097 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/422f9259-28be-4590-adf5-709666a3cd4f-config\") pod \"controller-manager-58984d85d9-vkxzv\" (UID: \"422f9259-28be-4590-adf5-709666a3cd4f\") " pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.232140 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/422f9259-28be-4590-adf5-709666a3cd4f-serving-cert\") pod \"controller-manager-58984d85d9-vkxzv\" (UID: \"422f9259-28be-4590-adf5-709666a3cd4f\") " pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.232178 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6d406364-372a-4bf3-8115-308facc3eb0c-client-ca\") pod \"route-controller-manager-7b8649b98-5sdpb\" (UID: \"6d406364-372a-4bf3-8115-308facc3eb0c\") " pod="openshift-route-controller-manager/route-controller-manager-7b8649b98-5sdpb" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.232210 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/422f9259-28be-4590-adf5-709666a3cd4f-client-ca\") pod \"controller-manager-58984d85d9-vkxzv\" (UID: \"422f9259-28be-4590-adf5-709666a3cd4f\") " pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.232261 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/422f9259-28be-4590-adf5-709666a3cd4f-proxy-ca-bundles\") pod \"controller-manager-58984d85d9-vkxzv\" (UID: \"422f9259-28be-4590-adf5-709666a3cd4f\") " pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.232294 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hltmf\" (UniqueName: \"kubernetes.io/projected/422f9259-28be-4590-adf5-709666a3cd4f-kube-api-access-hltmf\") pod \"controller-manager-58984d85d9-vkxzv\" (UID: 
\"422f9259-28be-4590-adf5-709666a3cd4f\") " pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.232333 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d406364-372a-4bf3-8115-308facc3eb0c-serving-cert\") pod \"route-controller-manager-7b8649b98-5sdpb\" (UID: \"6d406364-372a-4bf3-8115-308facc3eb0c\") " pod="openshift-route-controller-manager/route-controller-manager-7b8649b98-5sdpb" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.233683 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/422f9259-28be-4590-adf5-709666a3cd4f-client-ca\") pod \"controller-manager-58984d85d9-vkxzv\" (UID: \"422f9259-28be-4590-adf5-709666a3cd4f\") " pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.233829 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/422f9259-28be-4590-adf5-709666a3cd4f-config\") pod \"controller-manager-58984d85d9-vkxzv\" (UID: \"422f9259-28be-4590-adf5-709666a3cd4f\") " pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.233853 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6d406364-372a-4bf3-8115-308facc3eb0c-client-ca\") pod \"route-controller-manager-7b8649b98-5sdpb\" (UID: \"6d406364-372a-4bf3-8115-308facc3eb0c\") " pod="openshift-route-controller-manager/route-controller-manager-7b8649b98-5sdpb" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.234071 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d406364-372a-4bf3-8115-308facc3eb0c-config\") pod \"route-controller-manager-7b8649b98-5sdpb\" (UID: \"6d406364-372a-4bf3-8115-308facc3eb0c\") " pod="openshift-route-controller-manager/route-controller-manager-7b8649b98-5sdpb" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.234396 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/422f9259-28be-4590-adf5-709666a3cd4f-proxy-ca-bundles\") pod \"controller-manager-58984d85d9-vkxzv\" (UID: \"422f9259-28be-4590-adf5-709666a3cd4f\") " pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.237735 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/422f9259-28be-4590-adf5-709666a3cd4f-serving-cert\") pod \"controller-manager-58984d85d9-vkxzv\" (UID: \"422f9259-28be-4590-adf5-709666a3cd4f\") " pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.238313 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d406364-372a-4bf3-8115-308facc3eb0c-serving-cert\") pod \"route-controller-manager-7b8649b98-5sdpb\" (UID: \"6d406364-372a-4bf3-8115-308facc3eb0c\") " pod="openshift-route-controller-manager/route-controller-manager-7b8649b98-5sdpb" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.248780 4840 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-npgrc\" (UniqueName: \"kubernetes.io/projected/6d406364-372a-4bf3-8115-308facc3eb0c-kube-api-access-npgrc\") pod \"route-controller-manager-7b8649b98-5sdpb\" (UID: \"6d406364-372a-4bf3-8115-308facc3eb0c\") " pod="openshift-route-controller-manager/route-controller-manager-7b8649b98-5sdpb" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.251220 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hltmf\" (UniqueName: \"kubernetes.io/projected/422f9259-28be-4590-adf5-709666a3cd4f-kube-api-access-hltmf\") pod \"controller-manager-58984d85d9-vkxzv\" (UID: \"422f9259-28be-4590-adf5-709666a3cd4f\") " pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.287379 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b8649b98-5sdpb" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.301422 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.489487 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-58984d85d9-vkxzv"] Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.533700 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b8649b98-5sdpb"] Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.703405 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" event={"ID":"422f9259-28be-4590-adf5-709666a3cd4f","Type":"ContainerStarted","Data":"ea9a6b04c9e5dc97c62dd18f8341904df9f68cff816ab87b25fcf8525a76295d"} Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.703448 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" event={"ID":"422f9259-28be-4590-adf5-709666a3cd4f","Type":"ContainerStarted","Data":"4738ca91e0e5ccd8a255f67012be7a2b6d8f18e968e0e86cf5c9ef2d90563dbc"} Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.704232 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.705287 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b8649b98-5sdpb" event={"ID":"6d406364-372a-4bf3-8115-308facc3eb0c","Type":"ContainerStarted","Data":"6d355518112b2dada8225a8d649b622d4dfdf61d8cf8ea1fb3fdddcbf1bafd0c"} Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.705309 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b8649b98-5sdpb" event={"ID":"6d406364-372a-4bf3-8115-308facc3eb0c","Type":"ContainerStarted","Data":"6582607da6b0adc4b767c8390f1653ab2dceab24946221fae07a29a5feb9f8fb"} Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.705663 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7b8649b98-5sdpb" Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.707033 4840 patch_prober.go:28] interesting pod/route-controller-manager-7b8649b98-5sdpb container/route-controller-manager 
Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.707033 4840 patch_prober.go:28] interesting pod/route-controller-manager-7b8649b98-5sdpb container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.57:8443/healthz\": dial tcp 10.217.0.57:8443: connect: connection refused" start-of-body=
Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.707090 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-7b8649b98-5sdpb" podUID="6d406364-372a-4bf3-8115-308facc3eb0c" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.57:8443/healthz\": dial tcp 10.217.0.57:8443: connect: connection refused"
Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.707304 4840 patch_prober.go:28] interesting pod/controller-manager-58984d85d9-vkxzv container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.58:8443/healthz\": dial tcp 10.217.0.58:8443: connect: connection refused" start-of-body=
Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.707340 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" podUID="422f9259-28be-4590-adf5-709666a3cd4f" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.58:8443/healthz\": dial tcp 10.217.0.58:8443: connect: connection refused"
Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.751928 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" podStartSLOduration=2.7519034639999997 podStartE2EDuration="2.751903464s" podCreationTimestamp="2025-12-05 15:03:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:03:57.728406898 +0000 UTC m=+316.069469512" watchObservedRunningTime="2025-12-05 15:03:57.751903464 +0000 UTC m=+316.092966078"
Dec 05 15:03:57 crc kubenswrapper[4840]: I1205 15:03:57.756637 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7b8649b98-5sdpb" podStartSLOduration=2.756605068 podStartE2EDuration="2.756605068s" podCreationTimestamp="2025-12-05 15:03:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:03:57.74715681 +0000 UTC m=+316.088219424" watchObservedRunningTime="2025-12-05 15:03:57.756605068 +0000 UTC m=+316.097667682"
Dec 05 15:03:58 crc kubenswrapper[4840]: I1205 15:03:58.073586 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6" path="/var/lib/kubelet/pods/6a1bc4cd-5821-4b8a-a1d4-c3d0a1cf57f6/volumes"
Dec 05 15:03:58 crc kubenswrapper[4840]: I1205 15:03:58.719396 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv"
Dec 05 15:03:58 crc kubenswrapper[4840]: I1205 15:03:58.720163 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7b8649b98-5sdpb"
Dec 05 15:04:15 crc kubenswrapper[4840]: I1205 15:04:15.234660 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-58984d85d9-vkxzv"]
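[editor's note: a hedged sketch of where the podStartE2EDuration figures above come from: observed running time minus pod creation time, both copied from the log lines. The layout string is an assumption that matches the printed format; the logged duration is taken slightly later, at watch time, so it is a few milliseconds larger than this difference:]

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Wall-clock parts of the timestamps above (the " m=+…" monotonic
	// suffix is dropped; Go accepts fractional seconds when parsing).
	const layout = "2006-01-02 15:04:05 -0700 MST"
	created, _ := time.Parse(layout, "2025-12-05 15:03:55 +0000 UTC")
	running, _ := time.Parse(layout, "2025-12-05 15:03:57.728406898 +0000 UTC")
	fmt.Println(running.Sub(created)) // ≈ 2.73s, close to the 2.751903464s logged
}
```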
Dec 05 15:04:15 crc kubenswrapper[4840]: I1205 15:04:15.236842 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" podUID="422f9259-28be-4590-adf5-709666a3cd4f" containerName="controller-manager" containerID="cri-o://ea9a6b04c9e5dc97c62dd18f8341904df9f68cff816ab87b25fcf8525a76295d" gracePeriod=30
Dec 05 15:04:15 crc kubenswrapper[4840]: E1205 15:04:15.402504 4840 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod422f9259_28be_4590_adf5_709666a3cd4f.slice/crio-conmon-ea9a6b04c9e5dc97c62dd18f8341904df9f68cff816ab87b25fcf8525a76295d.scope\": RecentStats: unable to find data in memory cache]"
Dec 05 15:04:15 crc kubenswrapper[4840]: I1205 15:04:15.769927 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv"
Dec 05 15:04:15 crc kubenswrapper[4840]: I1205 15:04:15.823172 4840 generic.go:334] "Generic (PLEG): container finished" podID="422f9259-28be-4590-adf5-709666a3cd4f" containerID="ea9a6b04c9e5dc97c62dd18f8341904df9f68cff816ab87b25fcf8525a76295d" exitCode=0
Dec 05 15:04:15 crc kubenswrapper[4840]: I1205 15:04:15.823226 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" event={"ID":"422f9259-28be-4590-adf5-709666a3cd4f","Type":"ContainerDied","Data":"ea9a6b04c9e5dc97c62dd18f8341904df9f68cff816ab87b25fcf8525a76295d"}
Dec 05 15:04:15 crc kubenswrapper[4840]: I1205 15:04:15.823256 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv"
Dec 05 15:04:15 crc kubenswrapper[4840]: I1205 15:04:15.823277 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58984d85d9-vkxzv" event={"ID":"422f9259-28be-4590-adf5-709666a3cd4f","Type":"ContainerDied","Data":"4738ca91e0e5ccd8a255f67012be7a2b6d8f18e968e0e86cf5c9ef2d90563dbc"}
Dec 05 15:04:15 crc kubenswrapper[4840]: I1205 15:04:15.823297 4840 scope.go:117] "RemoveContainer" containerID="ea9a6b04c9e5dc97c62dd18f8341904df9f68cff816ab87b25fcf8525a76295d"
Dec 05 15:04:15 crc kubenswrapper[4840]: I1205 15:04:15.841228 4840 scope.go:117] "RemoveContainer" containerID="ea9a6b04c9e5dc97c62dd18f8341904df9f68cff816ab87b25fcf8525a76295d"
Dec 05 15:04:15 crc kubenswrapper[4840]: E1205 15:04:15.841594 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea9a6b04c9e5dc97c62dd18f8341904df9f68cff816ab87b25fcf8525a76295d\": container with ID starting with ea9a6b04c9e5dc97c62dd18f8341904df9f68cff816ab87b25fcf8525a76295d not found: ID does not exist" containerID="ea9a6b04c9e5dc97c62dd18f8341904df9f68cff816ab87b25fcf8525a76295d"
Dec 05 15:04:15 crc kubenswrapper[4840]: I1205 15:04:15.841626 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea9a6b04c9e5dc97c62dd18f8341904df9f68cff816ab87b25fcf8525a76295d"} err="failed to get container status \"ea9a6b04c9e5dc97c62dd18f8341904df9f68cff816ab87b25fcf8525a76295d\": rpc error: code = NotFound desc = could not find container \"ea9a6b04c9e5dc97c62dd18f8341904df9f68cff816ab87b25fcf8525a76295d\": container with ID starting with ea9a6b04c9e5dc97c62dd18f8341904df9f68cff816ab87b25fcf8525a76295d not found: ID does not exist"
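[editor's note: the NotFound errors above are benign. RemoveContainer raced with the runtime's own cleanup, so the container was already gone by the second status lookup; deletion is treated as idempotent. A minimal sketch of that convention, assuming the google.golang.org/grpc status package used by CRI clients:]

```go
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// ignoreNotFound treats "already deleted" as success, the same convention
// a CRI client applies when a delete races with the runtime's cleanup.
func ignoreNotFound(err error) error {
	if status.Code(err) == codes.NotFound {
		return nil
	}
	return err
}

func main() {
	err := status.Error(codes.NotFound, "could not find container")
	fmt.Println(ignoreNotFound(err)) // <nil>: removal already complete
}
```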
Dec 05 15:04:15 crc kubenswrapper[4840]: I1205 15:04:15.951987 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/422f9259-28be-4590-adf5-709666a3cd4f-proxy-ca-bundles\") pod \"422f9259-28be-4590-adf5-709666a3cd4f\" (UID: \"422f9259-28be-4590-adf5-709666a3cd4f\") "
Dec 05 15:04:15 crc kubenswrapper[4840]: I1205 15:04:15.952031 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/422f9259-28be-4590-adf5-709666a3cd4f-serving-cert\") pod \"422f9259-28be-4590-adf5-709666a3cd4f\" (UID: \"422f9259-28be-4590-adf5-709666a3cd4f\") "
Dec 05 15:04:15 crc kubenswrapper[4840]: I1205 15:04:15.952060 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/422f9259-28be-4590-adf5-709666a3cd4f-config\") pod \"422f9259-28be-4590-adf5-709666a3cd4f\" (UID: \"422f9259-28be-4590-adf5-709666a3cd4f\") "
Dec 05 15:04:15 crc kubenswrapper[4840]: I1205 15:04:15.952113 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hltmf\" (UniqueName: \"kubernetes.io/projected/422f9259-28be-4590-adf5-709666a3cd4f-kube-api-access-hltmf\") pod \"422f9259-28be-4590-adf5-709666a3cd4f\" (UID: \"422f9259-28be-4590-adf5-709666a3cd4f\") "
Dec 05 15:04:15 crc kubenswrapper[4840]: I1205 15:04:15.952162 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/422f9259-28be-4590-adf5-709666a3cd4f-client-ca\") pod \"422f9259-28be-4590-adf5-709666a3cd4f\" (UID: \"422f9259-28be-4590-adf5-709666a3cd4f\") "
Dec 05 15:04:15 crc kubenswrapper[4840]: I1205 15:04:15.952795 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/422f9259-28be-4590-adf5-709666a3cd4f-client-ca" (OuterVolumeSpecName: "client-ca") pod "422f9259-28be-4590-adf5-709666a3cd4f" (UID: "422f9259-28be-4590-adf5-709666a3cd4f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 15:04:15 crc kubenswrapper[4840]: I1205 15:04:15.952914 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/422f9259-28be-4590-adf5-709666a3cd4f-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "422f9259-28be-4590-adf5-709666a3cd4f" (UID: "422f9259-28be-4590-adf5-709666a3cd4f"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 15:04:15 crc kubenswrapper[4840]: I1205 15:04:15.952933 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/422f9259-28be-4590-adf5-709666a3cd4f-config" (OuterVolumeSpecName: "config") pod "422f9259-28be-4590-adf5-709666a3cd4f" (UID: "422f9259-28be-4590-adf5-709666a3cd4f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 15:04:15 crc kubenswrapper[4840]: I1205 15:04:15.953092 4840 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/422f9259-28be-4590-adf5-709666a3cd4f-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Dec 05 15:04:15 crc kubenswrapper[4840]: I1205 15:04:15.953107 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/422f9259-28be-4590-adf5-709666a3cd4f-config\") on node \"crc\" DevicePath \"\""
Dec 05 15:04:15 crc kubenswrapper[4840]: I1205 15:04:15.953116 4840 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/422f9259-28be-4590-adf5-709666a3cd4f-client-ca\") on node \"crc\" DevicePath \"\""
Dec 05 15:04:15 crc kubenswrapper[4840]: I1205 15:04:15.956774 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/422f9259-28be-4590-adf5-709666a3cd4f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "422f9259-28be-4590-adf5-709666a3cd4f" (UID: "422f9259-28be-4590-adf5-709666a3cd4f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 15:04:15 crc kubenswrapper[4840]: I1205 15:04:15.961001 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/422f9259-28be-4590-adf5-709666a3cd4f-kube-api-access-hltmf" (OuterVolumeSpecName: "kube-api-access-hltmf") pod "422f9259-28be-4590-adf5-709666a3cd4f" (UID: "422f9259-28be-4590-adf5-709666a3cd4f"). InnerVolumeSpecName "kube-api-access-hltmf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 15:04:16 crc kubenswrapper[4840]: I1205 15:04:16.054517 4840 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/422f9259-28be-4590-adf5-709666a3cd4f-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 15:04:16 crc kubenswrapper[4840]: I1205 15:04:16.054574 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hltmf\" (UniqueName: \"kubernetes.io/projected/422f9259-28be-4590-adf5-709666a3cd4f-kube-api-access-hltmf\") on node \"crc\" DevicePath \"\""
Dec 05 15:04:16 crc kubenswrapper[4840]: I1205 15:04:16.184483 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-58984d85d9-vkxzv"]
Dec 05 15:04:16 crc kubenswrapper[4840]: I1205 15:04:16.189073 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-58984d85d9-vkxzv"]
Dec 05 15:04:16 crc kubenswrapper[4840]: I1205 15:04:16.968893 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-56769b5f4b-hjmbv"]
Dec 05 15:04:16 crc kubenswrapper[4840]: E1205 15:04:16.969141 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="422f9259-28be-4590-adf5-709666a3cd4f" containerName="controller-manager"
Dec 05 15:04:16 crc kubenswrapper[4840]: I1205 15:04:16.969153 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="422f9259-28be-4590-adf5-709666a3cd4f" containerName="controller-manager"
Dec 05 15:04:16 crc kubenswrapper[4840]: I1205 15:04:16.969270 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="422f9259-28be-4590-adf5-709666a3cd4f" containerName="controller-manager"
Dec 05 15:04:16 crc kubenswrapper[4840]: I1205 15:04:16.969646 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-56769b5f4b-hjmbv"
Dec 05 15:04:16 crc kubenswrapper[4840]: I1205 15:04:16.971232 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 05 15:04:16 crc kubenswrapper[4840]: I1205 15:04:16.974909 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 05 15:04:16 crc kubenswrapper[4840]: I1205 15:04:16.975289 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 05 15:04:16 crc kubenswrapper[4840]: I1205 15:04:16.975313 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 05 15:04:16 crc kubenswrapper[4840]: I1205 15:04:16.976061 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 05 15:04:16 crc kubenswrapper[4840]: I1205 15:04:16.976412 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 05 15:04:16 crc kubenswrapper[4840]: I1205 15:04:16.979534 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-56769b5f4b-hjmbv"]
Dec 05 15:04:16 crc kubenswrapper[4840]: I1205 15:04:16.986212 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 05 15:04:17 crc kubenswrapper[4840]: I1205 15:04:17.066015 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27fa624f-a4f5-4f88-8d2b-0535f1e8b121-serving-cert\") pod \"controller-manager-56769b5f4b-hjmbv\" (UID: \"27fa624f-a4f5-4f88-8d2b-0535f1e8b121\") " pod="openshift-controller-manager/controller-manager-56769b5f4b-hjmbv"
Dec 05 15:04:17 crc kubenswrapper[4840]: I1205 15:04:17.066087 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4b28\" (UniqueName: \"kubernetes.io/projected/27fa624f-a4f5-4f88-8d2b-0535f1e8b121-kube-api-access-b4b28\") pod \"controller-manager-56769b5f4b-hjmbv\" (UID: \"27fa624f-a4f5-4f88-8d2b-0535f1e8b121\") " pod="openshift-controller-manager/controller-manager-56769b5f4b-hjmbv"
Dec 05 15:04:17 crc kubenswrapper[4840]: I1205 15:04:17.066138 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27fa624f-a4f5-4f88-8d2b-0535f1e8b121-config\") pod \"controller-manager-56769b5f4b-hjmbv\" (UID: \"27fa624f-a4f5-4f88-8d2b-0535f1e8b121\") " pod="openshift-controller-manager/controller-manager-56769b5f4b-hjmbv"
Dec 05 15:04:17 crc kubenswrapper[4840]: I1205 15:04:17.066169 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/27fa624f-a4f5-4f88-8d2b-0535f1e8b121-proxy-ca-bundles\") pod \"controller-manager-56769b5f4b-hjmbv\" (UID: \"27fa624f-a4f5-4f88-8d2b-0535f1e8b121\") " pod="openshift-controller-manager/controller-manager-56769b5f4b-hjmbv"
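[editor's note: the "Caches populated for *v1.ConfigMap/*v1.Secret" reflector lines above correspond to list-watch caches for the objects the new pod mounts. A minimal client-go equivalent of such a cache, under the assumption of an in-cluster config; the namespace matches the log:]

```go
package main

import (
	"fmt"
	"time"

	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
)

func main() {
	cfg, err := rest.InClusterConfig()
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)
	factory := informers.NewSharedInformerFactoryWithOptions(
		client, 10*time.Minute,
		informers.WithNamespace("openshift-controller-manager"),
	)
	cmInformer := factory.Core().V1().ConfigMaps().Informer()
	stop := make(chan struct{})
	defer close(stop)
	factory.Start(stop)
	factory.WaitForCacheSync(stop) // analogous to "Caches populated" above
	fmt.Println("configmaps cached:", len(cmInformer.GetStore().List()))
}
```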
\"kubernetes.io/configmap/27fa624f-a4f5-4f88-8d2b-0535f1e8b121-client-ca\") pod \"controller-manager-56769b5f4b-hjmbv\" (UID: \"27fa624f-a4f5-4f88-8d2b-0535f1e8b121\") " pod="openshift-controller-manager/controller-manager-56769b5f4b-hjmbv" Dec 05 15:04:17 crc kubenswrapper[4840]: I1205 15:04:17.167496 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27fa624f-a4f5-4f88-8d2b-0535f1e8b121-config\") pod \"controller-manager-56769b5f4b-hjmbv\" (UID: \"27fa624f-a4f5-4f88-8d2b-0535f1e8b121\") " pod="openshift-controller-manager/controller-manager-56769b5f4b-hjmbv" Dec 05 15:04:17 crc kubenswrapper[4840]: I1205 15:04:17.167583 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/27fa624f-a4f5-4f88-8d2b-0535f1e8b121-proxy-ca-bundles\") pod \"controller-manager-56769b5f4b-hjmbv\" (UID: \"27fa624f-a4f5-4f88-8d2b-0535f1e8b121\") " pod="openshift-controller-manager/controller-manager-56769b5f4b-hjmbv" Dec 05 15:04:17 crc kubenswrapper[4840]: I1205 15:04:17.167615 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/27fa624f-a4f5-4f88-8d2b-0535f1e8b121-client-ca\") pod \"controller-manager-56769b5f4b-hjmbv\" (UID: \"27fa624f-a4f5-4f88-8d2b-0535f1e8b121\") " pod="openshift-controller-manager/controller-manager-56769b5f4b-hjmbv" Dec 05 15:04:17 crc kubenswrapper[4840]: I1205 15:04:17.167661 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27fa624f-a4f5-4f88-8d2b-0535f1e8b121-serving-cert\") pod \"controller-manager-56769b5f4b-hjmbv\" (UID: \"27fa624f-a4f5-4f88-8d2b-0535f1e8b121\") " pod="openshift-controller-manager/controller-manager-56769b5f4b-hjmbv" Dec 05 15:04:17 crc kubenswrapper[4840]: I1205 15:04:17.167701 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4b28\" (UniqueName: \"kubernetes.io/projected/27fa624f-a4f5-4f88-8d2b-0535f1e8b121-kube-api-access-b4b28\") pod \"controller-manager-56769b5f4b-hjmbv\" (UID: \"27fa624f-a4f5-4f88-8d2b-0535f1e8b121\") " pod="openshift-controller-manager/controller-manager-56769b5f4b-hjmbv" Dec 05 15:04:17 crc kubenswrapper[4840]: I1205 15:04:17.169327 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/27fa624f-a4f5-4f88-8d2b-0535f1e8b121-proxy-ca-bundles\") pod \"controller-manager-56769b5f4b-hjmbv\" (UID: \"27fa624f-a4f5-4f88-8d2b-0535f1e8b121\") " pod="openshift-controller-manager/controller-manager-56769b5f4b-hjmbv" Dec 05 15:04:17 crc kubenswrapper[4840]: I1205 15:04:17.169473 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27fa624f-a4f5-4f88-8d2b-0535f1e8b121-config\") pod \"controller-manager-56769b5f4b-hjmbv\" (UID: \"27fa624f-a4f5-4f88-8d2b-0535f1e8b121\") " pod="openshift-controller-manager/controller-manager-56769b5f4b-hjmbv" Dec 05 15:04:17 crc kubenswrapper[4840]: I1205 15:04:17.169649 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/27fa624f-a4f5-4f88-8d2b-0535f1e8b121-client-ca\") pod \"controller-manager-56769b5f4b-hjmbv\" (UID: \"27fa624f-a4f5-4f88-8d2b-0535f1e8b121\") " 
pod="openshift-controller-manager/controller-manager-56769b5f4b-hjmbv" Dec 05 15:04:17 crc kubenswrapper[4840]: I1205 15:04:17.180568 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27fa624f-a4f5-4f88-8d2b-0535f1e8b121-serving-cert\") pod \"controller-manager-56769b5f4b-hjmbv\" (UID: \"27fa624f-a4f5-4f88-8d2b-0535f1e8b121\") " pod="openshift-controller-manager/controller-manager-56769b5f4b-hjmbv" Dec 05 15:04:17 crc kubenswrapper[4840]: I1205 15:04:17.183836 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4b28\" (UniqueName: \"kubernetes.io/projected/27fa624f-a4f5-4f88-8d2b-0535f1e8b121-kube-api-access-b4b28\") pod \"controller-manager-56769b5f4b-hjmbv\" (UID: \"27fa624f-a4f5-4f88-8d2b-0535f1e8b121\") " pod="openshift-controller-manager/controller-manager-56769b5f4b-hjmbv" Dec 05 15:04:17 crc kubenswrapper[4840]: I1205 15:04:17.285152 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-56769b5f4b-hjmbv" Dec 05 15:04:17 crc kubenswrapper[4840]: W1205 15:04:17.709788 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod27fa624f_a4f5_4f88_8d2b_0535f1e8b121.slice/crio-81cba0472a80a41acfc8aaf849ffdf98833e8118882923c62e91b39314ec9095 WatchSource:0}: Error finding container 81cba0472a80a41acfc8aaf849ffdf98833e8118882923c62e91b39314ec9095: Status 404 returned error can't find the container with id 81cba0472a80a41acfc8aaf849ffdf98833e8118882923c62e91b39314ec9095 Dec 05 15:04:17 crc kubenswrapper[4840]: I1205 15:04:17.711751 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-56769b5f4b-hjmbv"] Dec 05 15:04:17 crc kubenswrapper[4840]: I1205 15:04:17.835641 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-56769b5f4b-hjmbv" event={"ID":"27fa624f-a4f5-4f88-8d2b-0535f1e8b121","Type":"ContainerStarted","Data":"81cba0472a80a41acfc8aaf849ffdf98833e8118882923c62e91b39314ec9095"} Dec 05 15:04:18 crc kubenswrapper[4840]: I1205 15:04:18.075598 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="422f9259-28be-4590-adf5-709666a3cd4f" path="/var/lib/kubelet/pods/422f9259-28be-4590-adf5-709666a3cd4f/volumes" Dec 05 15:04:18 crc kubenswrapper[4840]: I1205 15:04:18.841780 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-56769b5f4b-hjmbv" event={"ID":"27fa624f-a4f5-4f88-8d2b-0535f1e8b121","Type":"ContainerStarted","Data":"9701da8227622df14099e11436b01236ebac8a956daee23e36f56df5eaa2d51f"} Dec 05 15:04:18 crc kubenswrapper[4840]: I1205 15:04:18.842128 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-56769b5f4b-hjmbv" Dec 05 15:04:18 crc kubenswrapper[4840]: I1205 15:04:18.847613 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-56769b5f4b-hjmbv" Dec 05 15:04:18 crc kubenswrapper[4840]: I1205 15:04:18.857891 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-56769b5f4b-hjmbv" podStartSLOduration=3.857854426 podStartE2EDuration="3.857854426s" podCreationTimestamp="2025-12-05 15:04:15 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:04:18.856390825 +0000 UTC m=+337.197453439" watchObservedRunningTime="2025-12-05 15:04:18.857854426 +0000 UTC m=+337.198917040" Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.351086 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-q9jjl"] Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.351764 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-q9jjl" podUID="516530e0-a660-4755-8d26-b7c798a43428" containerName="registry-server" containerID="cri-o://fa4f5f2ec8eb9d070f56c8245a016e5c7466e7de0387c00c05691554ba7208fc" gracePeriod=30 Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.363637 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vgz4z"] Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.363911 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vgz4z" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" containerName="registry-server" containerID="cri-o://823ac419b97f7a5053448c40d0322153c59d321d6315151f6d823df9eeaab6d8" gracePeriod=30 Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.370371 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2z4bf"] Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.370598 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2z4bf" podUID="a6138547-0c56-4951-bab0-283fe8649655" containerName="registry-server" containerID="cri-o://36361fd60bda7a7192f3b3c5c71e9c97dbdcd457ceb7915d08f5c05194333f6d" gracePeriod=30 Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.378960 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xc72x"] Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.379183 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xc72x" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" containerName="registry-server" containerID="cri-o://57d09f65354c43dc470b6e2a65b668adba4974d4af62cbf5f55b8d2887de43e9" gracePeriod=30 Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.388411 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nc87t"] Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.388597 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-nc87t" podUID="f9f44648-d5f1-49ee-a394-115e43c97fc9" containerName="marketplace-operator" containerID="cri-o://c29e94fec24fa6a3b89007ce6172a247b2109b25d1ec64a168b4f2e563688ef8" gracePeriod=30 Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.399168 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gwvqf"] Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.399390 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gwvqf" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" containerName="registry-server" containerID="cri-o://8fa50c9170cacedef1105761d60bbb70a7702a0e13feaf14e69840a1d32e8264" gracePeriod=30 Dec 05 15:04:33 crc 
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.415603 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-scqls"]
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.415894 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-scqls" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" containerName="registry-server" containerID="cri-o://4f0999ddd0d56682d452b276e9a0de9689245d98b9c4745fac4efdd38ccf3729" gracePeriod=30
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.431575 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hdjqt"]
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.431880 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hdjqt" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" containerName="registry-server" containerID="cri-o://e302deb3079d4f440df0b6b5b13dd17c2582341fb875cfd53560982ebb98fe36" gracePeriod=30
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.468682 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sm6c5"]
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.468935 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-sm6c5" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" containerName="registry-server" containerID="cri-o://2330eeea92785e54774a1e51df6a70a7acb18a4600c89b8fb547537fc777eba4" gracePeriod=30
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.568178 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mv2cn"]
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.568822 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mv2cn"
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.579444 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mv2cn"]
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.758568 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1581af24-4229-4cac-a548-20cafe277dff-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mv2cn\" (UID: \"1581af24-4229-4cac-a548-20cafe277dff\") " pod="openshift-marketplace/marketplace-operator-79b997595-mv2cn"
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.758791 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1581af24-4229-4cac-a548-20cafe277dff-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mv2cn\" (UID: \"1581af24-4229-4cac-a548-20cafe277dff\") " pod="openshift-marketplace/marketplace-operator-79b997595-mv2cn"
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.758893 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcpzc\" (UniqueName: \"kubernetes.io/projected/1581af24-4229-4cac-a548-20cafe277dff-kube-api-access-pcpzc\") pod \"marketplace-operator-79b997595-mv2cn\" (UID: \"1581af24-4229-4cac-a548-20cafe277dff\") " pod="openshift-marketplace/marketplace-operator-79b997595-mv2cn"
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.861324 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1581af24-4229-4cac-a548-20cafe277dff-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mv2cn\" (UID: \"1581af24-4229-4cac-a548-20cafe277dff\") " pod="openshift-marketplace/marketplace-operator-79b997595-mv2cn"
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.861387 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcpzc\" (UniqueName: \"kubernetes.io/projected/1581af24-4229-4cac-a548-20cafe277dff-kube-api-access-pcpzc\") pod \"marketplace-operator-79b997595-mv2cn\" (UID: \"1581af24-4229-4cac-a548-20cafe277dff\") " pod="openshift-marketplace/marketplace-operator-79b997595-mv2cn"
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.861482 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1581af24-4229-4cac-a548-20cafe277dff-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mv2cn\" (UID: \"1581af24-4229-4cac-a548-20cafe277dff\") " pod="openshift-marketplace/marketplace-operator-79b997595-mv2cn"
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.862653 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1581af24-4229-4cac-a548-20cafe277dff-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mv2cn\" (UID: \"1581af24-4229-4cac-a548-20cafe277dff\") " pod="openshift-marketplace/marketplace-operator-79b997595-mv2cn"
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.871280 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1581af24-4229-4cac-a548-20cafe277dff-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mv2cn\" (UID: \"1581af24-4229-4cac-a548-20cafe277dff\") " pod="openshift-marketplace/marketplace-operator-79b997595-mv2cn"
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.878022 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcpzc\" (UniqueName: \"kubernetes.io/projected/1581af24-4229-4cac-a548-20cafe277dff-kube-api-access-pcpzc\") pod \"marketplace-operator-79b997595-mv2cn\" (UID: \"1581af24-4229-4cac-a548-20cafe277dff\") " pod="openshift-marketplace/marketplace-operator-79b997595-mv2cn"
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.898130 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mv2cn"
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.971742 4840 generic.go:334] "Generic (PLEG): container finished" podID="a6138547-0c56-4951-bab0-283fe8649655" containerID="36361fd60bda7a7192f3b3c5c71e9c97dbdcd457ceb7915d08f5c05194333f6d" exitCode=0
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.971959 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2z4bf" event={"ID":"a6138547-0c56-4951-bab0-283fe8649655","Type":"ContainerDied","Data":"36361fd60bda7a7192f3b3c5c71e9c97dbdcd457ceb7915d08f5c05194333f6d"}
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.977582 4840 generic.go:334] "Generic (PLEG): container finished" podID="d59d7b71-22f9-49c1-9415-f420122f72df" containerID="57d09f65354c43dc470b6e2a65b668adba4974d4af62cbf5f55b8d2887de43e9" exitCode=0
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.977636 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xc72x" event={"ID":"d59d7b71-22f9-49c1-9415-f420122f72df","Type":"ContainerDied","Data":"57d09f65354c43dc470b6e2a65b668adba4974d4af62cbf5f55b8d2887de43e9"}
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.979898 4840 generic.go:334] "Generic (PLEG): container finished" podID="516530e0-a660-4755-8d26-b7c798a43428" containerID="fa4f5f2ec8eb9d070f56c8245a016e5c7466e7de0387c00c05691554ba7208fc" exitCode=0
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.979963 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q9jjl" event={"ID":"516530e0-a660-4755-8d26-b7c798a43428","Type":"ContainerDied","Data":"fa4f5f2ec8eb9d070f56c8245a016e5c7466e7de0387c00c05691554ba7208fc"}
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.989697 4840 generic.go:334] "Generic (PLEG): container finished" podID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" containerID="e302deb3079d4f440df0b6b5b13dd17c2582341fb875cfd53560982ebb98fe36" exitCode=0
Dec 05 15:04:33 crc kubenswrapper[4840]: I1205 15:04:33.989862 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hdjqt" event={"ID":"a8f9a961-ee6b-429f-b07e-12ee35a7c986","Type":"ContainerDied","Data":"e302deb3079d4f440df0b6b5b13dd17c2582341fb875cfd53560982ebb98fe36"}
Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:33.994411 4840 generic.go:334] "Generic (PLEG): container finished" podID="c6e0195b-10e8-465d-9e3d-548633d29ed7" containerID="8fa50c9170cacedef1105761d60bbb70a7702a0e13feaf14e69840a1d32e8264" exitCode=0
pod" pod="openshift-marketplace/redhat-marketplace-gwvqf" event={"ID":"c6e0195b-10e8-465d-9e3d-548633d29ed7","Type":"ContainerDied","Data":"8fa50c9170cacedef1105761d60bbb70a7702a0e13feaf14e69840a1d32e8264"} Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:33.998478 4840 generic.go:334] "Generic (PLEG): container finished" podID="f9f44648-d5f1-49ee-a394-115e43c97fc9" containerID="c29e94fec24fa6a3b89007ce6172a247b2109b25d1ec64a168b4f2e563688ef8" exitCode=0 Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:33.998573 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nc87t" event={"ID":"f9f44648-d5f1-49ee-a394-115e43c97fc9","Type":"ContainerDied","Data":"c29e94fec24fa6a3b89007ce6172a247b2109b25d1ec64a168b4f2e563688ef8"} Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.077022 4840 generic.go:334] "Generic (PLEG): container finished" podID="fc195b16-d39f-4786-a866-aab4d3377d52" containerID="4f0999ddd0d56682d452b276e9a0de9689245d98b9c4745fac4efdd38ccf3729" exitCode=0 Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.082489 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scqls" event={"ID":"fc195b16-d39f-4786-a866-aab4d3377d52","Type":"ContainerDied","Data":"4f0999ddd0d56682d452b276e9a0de9689245d98b9c4745fac4efdd38ccf3729"} Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.093833 4840 generic.go:334] "Generic (PLEG): container finished" podID="2b685425-9cba-4168-a2a6-a4a707989b01" containerID="823ac419b97f7a5053448c40d0322153c59d321d6315151f6d823df9eeaab6d8" exitCode=0 Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.093937 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgz4z" event={"ID":"2b685425-9cba-4168-a2a6-a4a707989b01","Type":"ContainerDied","Data":"823ac419b97f7a5053448c40d0322153c59d321d6315151f6d823df9eeaab6d8"} Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.126820 4840 generic.go:334] "Generic (PLEG): container finished" podID="a7d16875-422c-4a41-8fd2-498bb020ab9a" containerID="2330eeea92785e54774a1e51df6a70a7acb18a4600c89b8fb547537fc777eba4" exitCode=0 Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.126874 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sm6c5" event={"ID":"a7d16875-422c-4a41-8fd2-498bb020ab9a","Type":"ContainerDied","Data":"2330eeea92785e54774a1e51df6a70a7acb18a4600c89b8fb547537fc777eba4"} Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.686175 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gwvqf" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.790350 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rmsqh\" (UniqueName: \"kubernetes.io/projected/c6e0195b-10e8-465d-9e3d-548633d29ed7-kube-api-access-rmsqh\") pod \"c6e0195b-10e8-465d-9e3d-548633d29ed7\" (UID: \"c6e0195b-10e8-465d-9e3d-548633d29ed7\") " Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.790729 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6e0195b-10e8-465d-9e3d-548633d29ed7-catalog-content\") pod \"c6e0195b-10e8-465d-9e3d-548633d29ed7\" (UID: \"c6e0195b-10e8-465d-9e3d-548633d29ed7\") " Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.790794 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6e0195b-10e8-465d-9e3d-548633d29ed7-utilities\") pod \"c6e0195b-10e8-465d-9e3d-548633d29ed7\" (UID: \"c6e0195b-10e8-465d-9e3d-548633d29ed7\") " Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.791643 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6e0195b-10e8-465d-9e3d-548633d29ed7-utilities" (OuterVolumeSpecName: "utilities") pod "c6e0195b-10e8-465d-9e3d-548633d29ed7" (UID: "c6e0195b-10e8-465d-9e3d-548633d29ed7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.795925 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6e0195b-10e8-465d-9e3d-548633d29ed7-kube-api-access-rmsqh" (OuterVolumeSpecName: "kube-api-access-rmsqh") pod "c6e0195b-10e8-465d-9e3d-548633d29ed7" (UID: "c6e0195b-10e8-465d-9e3d-548633d29ed7"). InnerVolumeSpecName "kube-api-access-rmsqh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.801442 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xc72x" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.807117 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2z4bf" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.813216 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sm6c5" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.817790 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vgz4z" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.825322 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6e0195b-10e8-465d-9e3d-548633d29ed7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c6e0195b-10e8-465d-9e3d-548633d29ed7" (UID: "c6e0195b-10e8-465d-9e3d-548633d29ed7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.866375 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-scqls" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.870472 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nc87t" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.885335 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q9jjl" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.886325 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hdjqt" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.891979 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d59d7b71-22f9-49c1-9415-f420122f72df-catalog-content\") pod \"d59d7b71-22f9-49c1-9415-f420122f72df\" (UID: \"d59d7b71-22f9-49c1-9415-f420122f72df\") " Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.892172 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lsp47\" (UniqueName: \"kubernetes.io/projected/a6138547-0c56-4951-bab0-283fe8649655-kube-api-access-lsp47\") pod \"a6138547-0c56-4951-bab0-283fe8649655\" (UID: \"a6138547-0c56-4951-bab0-283fe8649655\") " Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.892286 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b685425-9cba-4168-a2a6-a4a707989b01-utilities\") pod \"2b685425-9cba-4168-a2a6-a4a707989b01\" (UID: \"2b685425-9cba-4168-a2a6-a4a707989b01\") " Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.892405 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc195b16-d39f-4786-a866-aab4d3377d52-utilities\") pod \"fc195b16-d39f-4786-a866-aab4d3377d52\" (UID: \"fc195b16-d39f-4786-a866-aab4d3377d52\") " Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.892513 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d59d7b71-22f9-49c1-9415-f420122f72df-utilities\") pod \"d59d7b71-22f9-49c1-9415-f420122f72df\" (UID: \"d59d7b71-22f9-49c1-9415-f420122f72df\") " Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.892668 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7d16875-422c-4a41-8fd2-498bb020ab9a-catalog-content\") pod \"a7d16875-422c-4a41-8fd2-498bb020ab9a\" (UID: \"a7d16875-422c-4a41-8fd2-498bb020ab9a\") " Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.892781 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6138547-0c56-4951-bab0-283fe8649655-utilities\") pod \"a6138547-0c56-4951-bab0-283fe8649655\" (UID: \"a6138547-0c56-4951-bab0-283fe8649655\") " Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.892895 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfzj8\" (UniqueName: \"kubernetes.io/projected/a7d16875-422c-4a41-8fd2-498bb020ab9a-kube-api-access-cfzj8\") pod \"a7d16875-422c-4a41-8fd2-498bb020ab9a\" (UID: \"a7d16875-422c-4a41-8fd2-498bb020ab9a\") " Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 
15:04:34.893007 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f9f44648-d5f1-49ee-a394-115e43c97fc9-marketplace-trusted-ca\") pod \"f9f44648-d5f1-49ee-a394-115e43c97fc9\" (UID: \"f9f44648-d5f1-49ee-a394-115e43c97fc9\") " Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.893121 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6138547-0c56-4951-bab0-283fe8649655-catalog-content\") pod \"a6138547-0c56-4951-bab0-283fe8649655\" (UID: \"a6138547-0c56-4951-bab0-283fe8649655\") " Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.893223 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f9f44648-d5f1-49ee-a394-115e43c97fc9-marketplace-operator-metrics\") pod \"f9f44648-d5f1-49ee-a394-115e43c97fc9\" (UID: \"f9f44648-d5f1-49ee-a394-115e43c97fc9\") " Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.893344 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lzst\" (UniqueName: \"kubernetes.io/projected/fc195b16-d39f-4786-a866-aab4d3377d52-kube-api-access-8lzst\") pod \"fc195b16-d39f-4786-a866-aab4d3377d52\" (UID: \"fc195b16-d39f-4786-a866-aab4d3377d52\") " Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.893453 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lr8jp\" (UniqueName: \"kubernetes.io/projected/f9f44648-d5f1-49ee-a394-115e43c97fc9-kube-api-access-lr8jp\") pod \"f9f44648-d5f1-49ee-a394-115e43c97fc9\" (UID: \"f9f44648-d5f1-49ee-a394-115e43c97fc9\") " Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.893566 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c72mj\" (UniqueName: \"kubernetes.io/projected/2b685425-9cba-4168-a2a6-a4a707989b01-kube-api-access-c72mj\") pod \"2b685425-9cba-4168-a2a6-a4a707989b01\" (UID: \"2b685425-9cba-4168-a2a6-a4a707989b01\") " Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.893675 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b685425-9cba-4168-a2a6-a4a707989b01-catalog-content\") pod \"2b685425-9cba-4168-a2a6-a4a707989b01\" (UID: \"2b685425-9cba-4168-a2a6-a4a707989b01\") " Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.893776 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7brzn\" (UniqueName: \"kubernetes.io/projected/d59d7b71-22f9-49c1-9415-f420122f72df-kube-api-access-7brzn\") pod \"d59d7b71-22f9-49c1-9415-f420122f72df\" (UID: \"d59d7b71-22f9-49c1-9415-f420122f72df\") " Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.893907 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7d16875-422c-4a41-8fd2-498bb020ab9a-utilities\") pod \"a7d16875-422c-4a41-8fd2-498bb020ab9a\" (UID: \"a7d16875-422c-4a41-8fd2-498bb020ab9a\") " Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.894031 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc195b16-d39f-4786-a866-aab4d3377d52-catalog-content\") pod \"fc195b16-d39f-4786-a866-aab4d3377d52\" (UID: 
\"fc195b16-d39f-4786-a866-aab4d3377d52\") " Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.894378 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6e0195b-10e8-465d-9e3d-548633d29ed7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.894749 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6e0195b-10e8-465d-9e3d-548633d29ed7-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.894859 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rmsqh\" (UniqueName: \"kubernetes.io/projected/c6e0195b-10e8-465d-9e3d-548633d29ed7-kube-api-access-rmsqh\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.896450 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9f44648-d5f1-49ee-a394-115e43c97fc9-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "f9f44648-d5f1-49ee-a394-115e43c97fc9" (UID: "f9f44648-d5f1-49ee-a394-115e43c97fc9"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.897191 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d59d7b71-22f9-49c1-9415-f420122f72df-utilities" (OuterVolumeSpecName: "utilities") pod "d59d7b71-22f9-49c1-9415-f420122f72df" (UID: "d59d7b71-22f9-49c1-9415-f420122f72df"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.897745 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b685425-9cba-4168-a2a6-a4a707989b01-utilities" (OuterVolumeSpecName: "utilities") pod "2b685425-9cba-4168-a2a6-a4a707989b01" (UID: "2b685425-9cba-4168-a2a6-a4a707989b01"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.898016 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc195b16-d39f-4786-a866-aab4d3377d52-utilities" (OuterVolumeSpecName: "utilities") pod "fc195b16-d39f-4786-a866-aab4d3377d52" (UID: "fc195b16-d39f-4786-a866-aab4d3377d52"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.898636 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6138547-0c56-4951-bab0-283fe8649655-utilities" (OuterVolumeSpecName: "utilities") pod "a6138547-0c56-4951-bab0-283fe8649655" (UID: "a6138547-0c56-4951-bab0-283fe8649655"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.904136 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b685425-9cba-4168-a2a6-a4a707989b01-kube-api-access-c72mj" (OuterVolumeSpecName: "kube-api-access-c72mj") pod "2b685425-9cba-4168-a2a6-a4a707989b01" (UID: "2b685425-9cba-4168-a2a6-a4a707989b01"). InnerVolumeSpecName "kube-api-access-c72mj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.904916 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7d16875-422c-4a41-8fd2-498bb020ab9a-kube-api-access-cfzj8" (OuterVolumeSpecName: "kube-api-access-cfzj8") pod "a7d16875-422c-4a41-8fd2-498bb020ab9a" (UID: "a7d16875-422c-4a41-8fd2-498bb020ab9a"). InnerVolumeSpecName "kube-api-access-cfzj8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.905559 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc195b16-d39f-4786-a866-aab4d3377d52-kube-api-access-8lzst" (OuterVolumeSpecName: "kube-api-access-8lzst") pod "fc195b16-d39f-4786-a866-aab4d3377d52" (UID: "fc195b16-d39f-4786-a866-aab4d3377d52"). InnerVolumeSpecName "kube-api-access-8lzst". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.905576 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9f44648-d5f1-49ee-a394-115e43c97fc9-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "f9f44648-d5f1-49ee-a394-115e43c97fc9" (UID: "f9f44648-d5f1-49ee-a394-115e43c97fc9"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.907585 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7d16875-422c-4a41-8fd2-498bb020ab9a-utilities" (OuterVolumeSpecName: "utilities") pod "a7d16875-422c-4a41-8fd2-498bb020ab9a" (UID: "a7d16875-422c-4a41-8fd2-498bb020ab9a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.909092 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9f44648-d5f1-49ee-a394-115e43c97fc9-kube-api-access-lr8jp" (OuterVolumeSpecName: "kube-api-access-lr8jp") pod "f9f44648-d5f1-49ee-a394-115e43c97fc9" (UID: "f9f44648-d5f1-49ee-a394-115e43c97fc9"). InnerVolumeSpecName "kube-api-access-lr8jp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.910744 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6138547-0c56-4951-bab0-283fe8649655-kube-api-access-lsp47" (OuterVolumeSpecName: "kube-api-access-lsp47") pod "a6138547-0c56-4951-bab0-283fe8649655" (UID: "a6138547-0c56-4951-bab0-283fe8649655"). InnerVolumeSpecName "kube-api-access-lsp47". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.919546 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d59d7b71-22f9-49c1-9415-f420122f72df-kube-api-access-7brzn" (OuterVolumeSpecName: "kube-api-access-7brzn") pod "d59d7b71-22f9-49c1-9415-f420122f72df" (UID: "d59d7b71-22f9-49c1-9415-f420122f72df"). InnerVolumeSpecName "kube-api-access-7brzn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.987551 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc195b16-d39f-4786-a866-aab4d3377d52-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fc195b16-d39f-4786-a866-aab4d3377d52" (UID: "fc195b16-d39f-4786-a866-aab4d3377d52"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.998357 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tpcnj\" (UniqueName: \"kubernetes.io/projected/a8f9a961-ee6b-429f-b07e-12ee35a7c986-kube-api-access-tpcnj\") pod \"a8f9a961-ee6b-429f-b07e-12ee35a7c986\" (UID: \"a8f9a961-ee6b-429f-b07e-12ee35a7c986\") " Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.998704 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8f9a961-ee6b-429f-b07e-12ee35a7c986-catalog-content\") pod \"a8f9a961-ee6b-429f-b07e-12ee35a7c986\" (UID: \"a8f9a961-ee6b-429f-b07e-12ee35a7c986\") " Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.999361 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8f9a961-ee6b-429f-b07e-12ee35a7c986-utilities\") pod \"a8f9a961-ee6b-429f-b07e-12ee35a7c986\" (UID: \"a8f9a961-ee6b-429f-b07e-12ee35a7c986\") " Dec 05 15:04:34 crc kubenswrapper[4840]: I1205 15:04:34.999892 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tw7nd\" (UniqueName: \"kubernetes.io/projected/516530e0-a660-4755-8d26-b7c798a43428-kube-api-access-tw7nd\") pod \"516530e0-a660-4755-8d26-b7c798a43428\" (UID: \"516530e0-a660-4755-8d26-b7c798a43428\") " Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.000470 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/516530e0-a660-4755-8d26-b7c798a43428-utilities\") pod \"516530e0-a660-4755-8d26-b7c798a43428\" (UID: \"516530e0-a660-4755-8d26-b7c798a43428\") " Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.000796 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/516530e0-a660-4755-8d26-b7c798a43428-catalog-content\") pod \"516530e0-a660-4755-8d26-b7c798a43428\" (UID: \"516530e0-a660-4755-8d26-b7c798a43428\") " Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.001359 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8f9a961-ee6b-429f-b07e-12ee35a7c986-utilities" (OuterVolumeSpecName: "utilities") pod "a8f9a961-ee6b-429f-b07e-12ee35a7c986" (UID: "a8f9a961-ee6b-429f-b07e-12ee35a7c986"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.001506 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8f9a961-ee6b-429f-b07e-12ee35a7c986-kube-api-access-tpcnj" (OuterVolumeSpecName: "kube-api-access-tpcnj") pod "a8f9a961-ee6b-429f-b07e-12ee35a7c986" (UID: "a8f9a961-ee6b-429f-b07e-12ee35a7c986"). InnerVolumeSpecName "kube-api-access-tpcnj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.002111 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/516530e0-a660-4755-8d26-b7c798a43428-utilities" (OuterVolumeSpecName: "utilities") pod "516530e0-a660-4755-8d26-b7c798a43428" (UID: "516530e0-a660-4755-8d26-b7c798a43428"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.004966 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lzst\" (UniqueName: \"kubernetes.io/projected/fc195b16-d39f-4786-a866-aab4d3377d52-kube-api-access-8lzst\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.005277 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/516530e0-a660-4755-8d26-b7c798a43428-kube-api-access-tw7nd" (OuterVolumeSpecName: "kube-api-access-tw7nd") pod "516530e0-a660-4755-8d26-b7c798a43428" (UID: "516530e0-a660-4755-8d26-b7c798a43428"). InnerVolumeSpecName "kube-api-access-tw7nd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.005660 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lr8jp\" (UniqueName: \"kubernetes.io/projected/f9f44648-d5f1-49ee-a394-115e43c97fc9-kube-api-access-lr8jp\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.005957 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c72mj\" (UniqueName: \"kubernetes.io/projected/2b685425-9cba-4168-a2a6-a4a707989b01-kube-api-access-c72mj\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.006174 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/516530e0-a660-4755-8d26-b7c798a43428-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.006354 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7brzn\" (UniqueName: \"kubernetes.io/projected/d59d7b71-22f9-49c1-9415-f420122f72df-kube-api-access-7brzn\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.006626 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7d16875-422c-4a41-8fd2-498bb020ab9a-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.006832 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tpcnj\" (UniqueName: \"kubernetes.io/projected/a8f9a961-ee6b-429f-b07e-12ee35a7c986-kube-api-access-tpcnj\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.007247 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc195b16-d39f-4786-a866-aab4d3377d52-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.007664 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8f9a961-ee6b-429f-b07e-12ee35a7c986-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.008192 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lsp47\" 
(UniqueName: \"kubernetes.io/projected/a6138547-0c56-4951-bab0-283fe8649655-kube-api-access-lsp47\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.008938 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b685425-9cba-4168-a2a6-a4a707989b01-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.008978 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc195b16-d39f-4786-a866-aab4d3377d52-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.008990 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d59d7b71-22f9-49c1-9415-f420122f72df-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.009002 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6138547-0c56-4951-bab0-283fe8649655-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.009015 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfzj8\" (UniqueName: \"kubernetes.io/projected/a7d16875-422c-4a41-8fd2-498bb020ab9a-kube-api-access-cfzj8\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.009027 4840 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f9f44648-d5f1-49ee-a394-115e43c97fc9-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.009039 4840 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f9f44648-d5f1-49ee-a394-115e43c97fc9-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.024986 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d59d7b71-22f9-49c1-9415-f420122f72df-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d59d7b71-22f9-49c1-9415-f420122f72df" (UID: "d59d7b71-22f9-49c1-9415-f420122f72df"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.025637 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b685425-9cba-4168-a2a6-a4a707989b01-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2b685425-9cba-4168-a2a6-a4a707989b01" (UID: "2b685425-9cba-4168-a2a6-a4a707989b01"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.026728 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6138547-0c56-4951-bab0-283fe8649655-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a6138547-0c56-4951-bab0-283fe8649655" (UID: "a6138547-0c56-4951-bab0-283fe8649655"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.058218 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/516530e0-a660-4755-8d26-b7c798a43428-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "516530e0-a660-4755-8d26-b7c798a43428" (UID: "516530e0-a660-4755-8d26-b7c798a43428"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.078332 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7d16875-422c-4a41-8fd2-498bb020ab9a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a7d16875-422c-4a41-8fd2-498bb020ab9a" (UID: "a7d16875-422c-4a41-8fd2-498bb020ab9a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.110045 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tw7nd\" (UniqueName: \"kubernetes.io/projected/516530e0-a660-4755-8d26-b7c798a43428-kube-api-access-tw7nd\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.110079 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7d16875-422c-4a41-8fd2-498bb020ab9a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.110091 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6138547-0c56-4951-bab0-283fe8649655-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.110103 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b685425-9cba-4168-a2a6-a4a707989b01-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.110113 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/516530e0-a660-4755-8d26-b7c798a43428-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.110124 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d59d7b71-22f9-49c1-9415-f420122f72df-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.124752 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8f9a961-ee6b-429f-b07e-12ee35a7c986-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a8f9a961-ee6b-429f-b07e-12ee35a7c986" (UID: "a8f9a961-ee6b-429f-b07e-12ee35a7c986"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.136928 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gwvqf" event={"ID":"c6e0195b-10e8-465d-9e3d-548633d29ed7","Type":"ContainerDied","Data":"85e87d404ca3c0ef85fd4e6d315997ef97028c32d3ca908c1b6aa2e31d31f3c4"} Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.137003 4840 scope.go:117] "RemoveContainer" containerID="8fa50c9170cacedef1105761d60bbb70a7702a0e13feaf14e69840a1d32e8264" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.137022 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gwvqf" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.138765 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hdjqt" event={"ID":"a8f9a961-ee6b-429f-b07e-12ee35a7c986","Type":"ContainerDied","Data":"ec0433b010919d2f78398e9e3f824f93f7f9af676c78381bc5a472a2208dca86"} Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.138787 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hdjqt" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.141317 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vgz4z" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.141429 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgz4z" event={"ID":"2b685425-9cba-4168-a2a6-a4a707989b01","Type":"ContainerDied","Data":"0c515eeef51c278fa58010986c6bad649794610bb07d53d2f5c35df56fcb65eb"} Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.145081 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2z4bf" event={"ID":"a6138547-0c56-4951-bab0-283fe8649655","Type":"ContainerDied","Data":"d762001bb50fef73518ab512bdc0b9b9eae44817a27bfb3db034933b38517ce5"} Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.145192 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2z4bf" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.148768 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scqls" event={"ID":"fc195b16-d39f-4786-a866-aab4d3377d52","Type":"ContainerDied","Data":"1ff0c16917400c645491dc60b5266862d7f10652f69e561d67de1080fa459b99"} Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.148845 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-scqls" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.154626 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sm6c5" event={"ID":"a7d16875-422c-4a41-8fd2-498bb020ab9a","Type":"ContainerDied","Data":"d8c4d8b9f96e530db6e65b3bb31bb78867e725f7a062d3bb6736ba30f7ee4dd4"} Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.154648 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-sm6c5" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.158001 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xc72x" event={"ID":"d59d7b71-22f9-49c1-9415-f420122f72df","Type":"ContainerDied","Data":"5ded5dc137717b428e636ca72a0ea30fdf38a92f2c0e5fea69fc2802a6d44498"} Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.158103 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xc72x" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.164226 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q9jjl" event={"ID":"516530e0-a660-4755-8d26-b7c798a43428","Type":"ContainerDied","Data":"529600e2fa37a104b52d3ec5d17acbc240a428f6ebc5935ca711afc2b28103d1"} Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.164340 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q9jjl" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.167706 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nc87t" event={"ID":"f9f44648-d5f1-49ee-a394-115e43c97fc9","Type":"ContainerDied","Data":"a29d3bf0bd2b782009ec10ec573e33af37c3733cd9fd5454f7fa6c1ac84127b9"} Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.167783 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nc87t" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.177687 4840 scope.go:117] "RemoveContainer" containerID="b0c0669d2c6635b2bd16604753789a64509fab8f18d6cc55be33536e6c3bbc06" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.183596 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hdjqt"] Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.190987 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hdjqt"] Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.206741 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-scqls"] Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.212055 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8f9a961-ee6b-429f-b07e-12ee35a7c986-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.222154 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-scqls"] Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.247540 4840 scope.go:117] "RemoveContainer" containerID="b03d80f08fbdb37660fe078530c5a404593a4038b8f5b67efd68867091938f74" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.253434 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sm6c5"] Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.258203 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-sm6c5"] Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.264564 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vgz4z"] Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.269802 4840 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vgz4z"] Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.280052 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mv2cn"] Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.280356 4840 scope.go:117] "RemoveContainer" containerID="e302deb3079d4f440df0b6b5b13dd17c2582341fb875cfd53560982ebb98fe36" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.284312 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xc72x"] Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.287042 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xc72x"] Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.300950 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-q9jjl"] Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.315975 4840 scope.go:117] "RemoveContainer" containerID="c5733eec898c952e9a636e8db8194d27df554512c81fcc4a6b0426dcc2cba127" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.325531 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-q9jjl"] Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.327094 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gwvqf"] Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.357923 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gwvqf"] Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.361515 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2z4bf"] Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.373396 4840 scope.go:117] "RemoveContainer" containerID="0d632a8ed526e130e99d7788db3be246ee246e2ab1c2b594e95f4798b37d448b" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.376312 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2z4bf"] Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.382387 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nc87t"] Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.384150 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nc87t"] Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.395847 4840 scope.go:117] "RemoveContainer" containerID="823ac419b97f7a5053448c40d0322153c59d321d6315151f6d823df9eeaab6d8" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.411892 4840 scope.go:117] "RemoveContainer" containerID="031f04740dcae33e9a8ba79c0b30fd8d079566591b5eb4837c7d497ae2b073c1" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.431104 4840 scope.go:117] "RemoveContainer" containerID="b3a697caf600ee4e6c9f253f5968dfb9b3a01a0341f2db3690062996b72de81c" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.443262 4840 scope.go:117] "RemoveContainer" containerID="36361fd60bda7a7192f3b3c5c71e9c97dbdcd457ceb7915d08f5c05194333f6d" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.459443 4840 scope.go:117] "RemoveContainer" containerID="e0371959a85dd3e6a9a50fe7fe0b663bc8be2f7c92ddfaef448a505da843ab10" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.529143 4840 scope.go:117] 
"RemoveContainer" containerID="7f76fa8b9f344c1476d3c2bcc64093413948b2c5ce05342d5b0a82435a0ab5b9" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.548761 4840 scope.go:117] "RemoveContainer" containerID="4f0999ddd0d56682d452b276e9a0de9689245d98b9c4745fac4efdd38ccf3729" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.566793 4840 scope.go:117] "RemoveContainer" containerID="123c670a24708e644cf50a2c6434f66f9d2bdc73f0a25cfb9096daee408cc348" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.581393 4840 scope.go:117] "RemoveContainer" containerID="5dc3451197173ee17564fb9974b4a24c574d5d1c1a03310250de6ed13d4ed92e" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.604531 4840 scope.go:117] "RemoveContainer" containerID="2330eeea92785e54774a1e51df6a70a7acb18a4600c89b8fb547537fc777eba4" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.618135 4840 scope.go:117] "RemoveContainer" containerID="1821b226c0af7030ca6460e01717830ed14b1871e7a49df0819841306361734d" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.632121 4840 scope.go:117] "RemoveContainer" containerID="6097bb62d3c707cbd4806c7ff7d94a0899a3fbc0820afbb18b5ae18d9c3836f2" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.647985 4840 scope.go:117] "RemoveContainer" containerID="57d09f65354c43dc470b6e2a65b668adba4974d4af62cbf5f55b8d2887de43e9" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.670711 4840 scope.go:117] "RemoveContainer" containerID="48357c6d8160591afb9a0999d83e703ff8c5dba67e5b97d19e02f19de5c8b47d" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.686973 4840 scope.go:117] "RemoveContainer" containerID="7b83c7ab857dfadc8187e49f0a8fce9f6bc0353c5c1318c6f998b94b29c94d07" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.703838 4840 scope.go:117] "RemoveContainer" containerID="fa4f5f2ec8eb9d070f56c8245a016e5c7466e7de0387c00c05691554ba7208fc" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.717938 4840 scope.go:117] "RemoveContainer" containerID="11c1fe4488dc96a54b799ee9cf10f2f10bea90527daf8b9b86489230379b8c87" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.735151 4840 scope.go:117] "RemoveContainer" containerID="aa47b781797f481e47dda7b92394d6bae53a858c5bc956410477abaf003380bc" Dec 05 15:04:35 crc kubenswrapper[4840]: I1205 15:04:35.752983 4840 scope.go:117] "RemoveContainer" containerID="c29e94fec24fa6a3b89007ce6172a247b2109b25d1ec64a168b4f2e563688ef8" Dec 05 15:04:36 crc kubenswrapper[4840]: I1205 15:04:36.074919 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" path="/var/lib/kubelet/pods/2b685425-9cba-4168-a2a6-a4a707989b01/volumes" Dec 05 15:04:36 crc kubenswrapper[4840]: I1205 15:04:36.075532 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="516530e0-a660-4755-8d26-b7c798a43428" path="/var/lib/kubelet/pods/516530e0-a660-4755-8d26-b7c798a43428/volumes" Dec 05 15:04:36 crc kubenswrapper[4840]: I1205 15:04:36.076106 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6138547-0c56-4951-bab0-283fe8649655" path="/var/lib/kubelet/pods/a6138547-0c56-4951-bab0-283fe8649655/volumes" Dec 05 15:04:36 crc kubenswrapper[4840]: I1205 15:04:36.077050 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" path="/var/lib/kubelet/pods/a7d16875-422c-4a41-8fd2-498bb020ab9a/volumes" Dec 05 15:04:36 crc kubenswrapper[4840]: I1205 15:04:36.077585 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" path="/var/lib/kubelet/pods/a8f9a961-ee6b-429f-b07e-12ee35a7c986/volumes" Dec 05 15:04:36 crc kubenswrapper[4840]: I1205 15:04:36.078517 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" path="/var/lib/kubelet/pods/c6e0195b-10e8-465d-9e3d-548633d29ed7/volumes" Dec 05 15:04:36 crc kubenswrapper[4840]: I1205 15:04:36.079084 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" path="/var/lib/kubelet/pods/d59d7b71-22f9-49c1-9415-f420122f72df/volumes" Dec 05 15:04:36 crc kubenswrapper[4840]: I1205 15:04:36.080051 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9f44648-d5f1-49ee-a394-115e43c97fc9" path="/var/lib/kubelet/pods/f9f44648-d5f1-49ee-a394-115e43c97fc9/volumes" Dec 05 15:04:36 crc kubenswrapper[4840]: I1205 15:04:36.080530 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" path="/var/lib/kubelet/pods/fc195b16-d39f-4786-a866-aab4d3377d52/volumes" Dec 05 15:04:36 crc kubenswrapper[4840]: I1205 15:04:36.180234 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mv2cn" event={"ID":"1581af24-4229-4cac-a548-20cafe277dff","Type":"ContainerStarted","Data":"2ac8f1854c4ac348d91254fb61adf9f5a56679abd1993bd375d032f888b0f676"} Dec 05 15:04:36 crc kubenswrapper[4840]: I1205 15:04:36.180293 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mv2cn" event={"ID":"1581af24-4229-4cac-a548-20cafe277dff","Type":"ContainerStarted","Data":"126847e9c2751d0f2b0cfffc3daef3bb57e4277da200b29ab6553101ade54a38"} Dec 05 15:04:36 crc kubenswrapper[4840]: I1205 15:04:36.180647 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-mv2cn" Dec 05 15:04:36 crc kubenswrapper[4840]: I1205 15:04:36.185091 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-mv2cn" Dec 05 15:04:36 crc kubenswrapper[4840]: I1205 15:04:36.198040 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-mv2cn" podStartSLOduration=3.198008152 podStartE2EDuration="3.198008152s" podCreationTimestamp="2025-12-05 15:04:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:04:36.196052636 +0000 UTC m=+354.537115250" watchObservedRunningTime="2025-12-05 15:04:36.198008152 +0000 UTC m=+354.539070806" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.471770 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.472242 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:04:49 crc 
kubenswrapper[4840]: I1205 15:04:49.776846 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hmzgl"] Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777072 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" containerName="registry-server" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777084 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" containerName="registry-server" Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777094 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="516530e0-a660-4755-8d26-b7c798a43428" containerName="extract-utilities" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777100 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="516530e0-a660-4755-8d26-b7c798a43428" containerName="extract-utilities" Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777108 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" containerName="extract-utilities" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777115 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" containerName="extract-utilities" Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777123 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" containerName="extract-utilities" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777129 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" containerName="extract-utilities" Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777136 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" containerName="registry-server" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777142 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" containerName="registry-server" Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777148 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" containerName="extract-utilities" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777153 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" containerName="extract-utilities" Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777161 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="516530e0-a660-4755-8d26-b7c798a43428" containerName="extract-content" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777166 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="516530e0-a660-4755-8d26-b7c798a43428" containerName="extract-content" Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777174 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" containerName="extract-content" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777180 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" containerName="extract-content" Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777190 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" containerName="registry-server" Dec 05 
15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777195 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" containerName="registry-server" Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777203 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" containerName="extract-content" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777208 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" containerName="extract-content" Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777216 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6138547-0c56-4951-bab0-283fe8649655" containerName="registry-server" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777221 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6138547-0c56-4951-bab0-283fe8649655" containerName="registry-server" Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777231 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="516530e0-a660-4755-8d26-b7c798a43428" containerName="registry-server" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777236 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="516530e0-a660-4755-8d26-b7c798a43428" containerName="registry-server" Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777244 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" containerName="registry-server" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777249 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" containerName="registry-server" Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777258 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6138547-0c56-4951-bab0-283fe8649655" containerName="extract-utilities" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777263 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6138547-0c56-4951-bab0-283fe8649655" containerName="extract-utilities" Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777269 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" containerName="registry-server" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777274 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" containerName="registry-server" Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777283 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" containerName="extract-content" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777288 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" containerName="extract-content" Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777295 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" containerName="registry-server" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777300 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" containerName="registry-server" Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777308 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" 
containerName="extract-utilities" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777314 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" containerName="extract-utilities" Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777322 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9f44648-d5f1-49ee-a394-115e43c97fc9" containerName="marketplace-operator" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777327 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9f44648-d5f1-49ee-a394-115e43c97fc9" containerName="marketplace-operator" Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777335 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6138547-0c56-4951-bab0-283fe8649655" containerName="extract-content" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777340 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6138547-0c56-4951-bab0-283fe8649655" containerName="extract-content" Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777348 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" containerName="extract-content" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777354 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" containerName="extract-content" Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777361 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" containerName="extract-content" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777366 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" containerName="extract-content" Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777372 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" containerName="extract-utilities" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777377 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" containerName="extract-utilities" Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777384 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" containerName="extract-utilities" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777390 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" containerName="extract-utilities" Dec 05 15:04:49 crc kubenswrapper[4840]: E1205 15:04:49.777398 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" containerName="extract-content" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777403 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" containerName="extract-content" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777477 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc195b16-d39f-4786-a866-aab4d3377d52" containerName="registry-server" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777487 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9f44648-d5f1-49ee-a394-115e43c97fc9" containerName="marketplace-operator" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777495 4840 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="2b685425-9cba-4168-a2a6-a4a707989b01" containerName="registry-server" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777506 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6e0195b-10e8-465d-9e3d-548633d29ed7" containerName="registry-server" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777513 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="d59d7b71-22f9-49c1-9415-f420122f72df" containerName="registry-server" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777519 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6138547-0c56-4951-bab0-283fe8649655" containerName="registry-server" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777529 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7d16875-422c-4a41-8fd2-498bb020ab9a" containerName="registry-server" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777536 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8f9a961-ee6b-429f-b07e-12ee35a7c986" containerName="registry-server" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.777545 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="516530e0-a660-4755-8d26-b7c798a43428" containerName="registry-server" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.778196 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hmzgl" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.779808 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.821421 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hmzgl"] Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.897404 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df7e1030-5127-493c-ab1a-a8f8ce069e83-catalog-content\") pod \"redhat-marketplace-hmzgl\" (UID: \"df7e1030-5127-493c-ab1a-a8f8ce069e83\") " pod="openshift-marketplace/redhat-marketplace-hmzgl" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.897483 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df7e1030-5127-493c-ab1a-a8f8ce069e83-utilities\") pod \"redhat-marketplace-hmzgl\" (UID: \"df7e1030-5127-493c-ab1a-a8f8ce069e83\") " pod="openshift-marketplace/redhat-marketplace-hmzgl" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.897528 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bps5d\" (UniqueName: \"kubernetes.io/projected/df7e1030-5127-493c-ab1a-a8f8ce069e83-kube-api-access-bps5d\") pod \"redhat-marketplace-hmzgl\" (UID: \"df7e1030-5127-493c-ab1a-a8f8ce069e83\") " pod="openshift-marketplace/redhat-marketplace-hmzgl" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.976754 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-25nwq"] Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.977915 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-25nwq" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.979564 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.989025 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-25nwq"] Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.998047 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df7e1030-5127-493c-ab1a-a8f8ce069e83-catalog-content\") pod \"redhat-marketplace-hmzgl\" (UID: \"df7e1030-5127-493c-ab1a-a8f8ce069e83\") " pod="openshift-marketplace/redhat-marketplace-hmzgl" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.998110 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df7e1030-5127-493c-ab1a-a8f8ce069e83-utilities\") pod \"redhat-marketplace-hmzgl\" (UID: \"df7e1030-5127-493c-ab1a-a8f8ce069e83\") " pod="openshift-marketplace/redhat-marketplace-hmzgl" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.998136 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bps5d\" (UniqueName: \"kubernetes.io/projected/df7e1030-5127-493c-ab1a-a8f8ce069e83-kube-api-access-bps5d\") pod \"redhat-marketplace-hmzgl\" (UID: \"df7e1030-5127-493c-ab1a-a8f8ce069e83\") " pod="openshift-marketplace/redhat-marketplace-hmzgl" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.998848 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df7e1030-5127-493c-ab1a-a8f8ce069e83-catalog-content\") pod \"redhat-marketplace-hmzgl\" (UID: \"df7e1030-5127-493c-ab1a-a8f8ce069e83\") " pod="openshift-marketplace/redhat-marketplace-hmzgl" Dec 05 15:04:49 crc kubenswrapper[4840]: I1205 15:04:49.999581 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df7e1030-5127-493c-ab1a-a8f8ce069e83-utilities\") pod \"redhat-marketplace-hmzgl\" (UID: \"df7e1030-5127-493c-ab1a-a8f8ce069e83\") " pod="openshift-marketplace/redhat-marketplace-hmzgl" Dec 05 15:04:50 crc kubenswrapper[4840]: I1205 15:04:50.030653 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bps5d\" (UniqueName: \"kubernetes.io/projected/df7e1030-5127-493c-ab1a-a8f8ce069e83-kube-api-access-bps5d\") pod \"redhat-marketplace-hmzgl\" (UID: \"df7e1030-5127-493c-ab1a-a8f8ce069e83\") " pod="openshift-marketplace/redhat-marketplace-hmzgl" Dec 05 15:04:50 crc kubenswrapper[4840]: I1205 15:04:50.098344 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hmzgl" Dec 05 15:04:50 crc kubenswrapper[4840]: I1205 15:04:50.099000 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7ef68ef3-0dac-4194-9d02-c0156a923d76-catalog-content\") pod \"redhat-operators-25nwq\" (UID: \"7ef68ef3-0dac-4194-9d02-c0156a923d76\") " pod="openshift-marketplace/redhat-operators-25nwq" Dec 05 15:04:50 crc kubenswrapper[4840]: I1205 15:04:50.099096 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qcch\" (UniqueName: \"kubernetes.io/projected/7ef68ef3-0dac-4194-9d02-c0156a923d76-kube-api-access-4qcch\") pod \"redhat-operators-25nwq\" (UID: \"7ef68ef3-0dac-4194-9d02-c0156a923d76\") " pod="openshift-marketplace/redhat-operators-25nwq" Dec 05 15:04:50 crc kubenswrapper[4840]: I1205 15:04:50.099172 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7ef68ef3-0dac-4194-9d02-c0156a923d76-utilities\") pod \"redhat-operators-25nwq\" (UID: \"7ef68ef3-0dac-4194-9d02-c0156a923d76\") " pod="openshift-marketplace/redhat-operators-25nwq" Dec 05 15:04:50 crc kubenswrapper[4840]: I1205 15:04:50.200713 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7ef68ef3-0dac-4194-9d02-c0156a923d76-utilities\") pod \"redhat-operators-25nwq\" (UID: \"7ef68ef3-0dac-4194-9d02-c0156a923d76\") " pod="openshift-marketplace/redhat-operators-25nwq" Dec 05 15:04:50 crc kubenswrapper[4840]: I1205 15:04:50.201085 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7ef68ef3-0dac-4194-9d02-c0156a923d76-catalog-content\") pod \"redhat-operators-25nwq\" (UID: \"7ef68ef3-0dac-4194-9d02-c0156a923d76\") " pod="openshift-marketplace/redhat-operators-25nwq" Dec 05 15:04:50 crc kubenswrapper[4840]: I1205 15:04:50.201336 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qcch\" (UniqueName: \"kubernetes.io/projected/7ef68ef3-0dac-4194-9d02-c0156a923d76-kube-api-access-4qcch\") pod \"redhat-operators-25nwq\" (UID: \"7ef68ef3-0dac-4194-9d02-c0156a923d76\") " pod="openshift-marketplace/redhat-operators-25nwq" Dec 05 15:04:50 crc kubenswrapper[4840]: I1205 15:04:50.202257 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7ef68ef3-0dac-4194-9d02-c0156a923d76-utilities\") pod \"redhat-operators-25nwq\" (UID: \"7ef68ef3-0dac-4194-9d02-c0156a923d76\") " pod="openshift-marketplace/redhat-operators-25nwq" Dec 05 15:04:50 crc kubenswrapper[4840]: I1205 15:04:50.202269 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7ef68ef3-0dac-4194-9d02-c0156a923d76-catalog-content\") pod \"redhat-operators-25nwq\" (UID: \"7ef68ef3-0dac-4194-9d02-c0156a923d76\") " pod="openshift-marketplace/redhat-operators-25nwq" Dec 05 15:04:50 crc kubenswrapper[4840]: I1205 15:04:50.218022 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qcch\" (UniqueName: \"kubernetes.io/projected/7ef68ef3-0dac-4194-9d02-c0156a923d76-kube-api-access-4qcch\") pod \"redhat-operators-25nwq\" (UID: 
\"7ef68ef3-0dac-4194-9d02-c0156a923d76\") " pod="openshift-marketplace/redhat-operators-25nwq" Dec 05 15:04:50 crc kubenswrapper[4840]: I1205 15:04:50.296531 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-25nwq" Dec 05 15:04:50 crc kubenswrapper[4840]: I1205 15:04:50.492123 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hmzgl"] Dec 05 15:04:50 crc kubenswrapper[4840]: W1205 15:04:50.495435 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddf7e1030_5127_493c_ab1a_a8f8ce069e83.slice/crio-44d5e36fe4d4b7c804a5fdf15a997852bd031ca911260cf3b6c4fad59c6efba4 WatchSource:0}: Error finding container 44d5e36fe4d4b7c804a5fdf15a997852bd031ca911260cf3b6c4fad59c6efba4: Status 404 returned error can't find the container with id 44d5e36fe4d4b7c804a5fdf15a997852bd031ca911260cf3b6c4fad59c6efba4 Dec 05 15:04:50 crc kubenswrapper[4840]: I1205 15:04:50.701883 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-25nwq"] Dec 05 15:04:50 crc kubenswrapper[4840]: W1205 15:04:50.737090 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7ef68ef3_0dac_4194_9d02_c0156a923d76.slice/crio-b91ec3cad7677d0351063537e0d4e56382d67d32d77623aedeab7e986e1da40f WatchSource:0}: Error finding container b91ec3cad7677d0351063537e0d4e56382d67d32d77623aedeab7e986e1da40f: Status 404 returned error can't find the container with id b91ec3cad7677d0351063537e0d4e56382d67d32d77623aedeab7e986e1da40f Dec 05 15:04:51 crc kubenswrapper[4840]: I1205 15:04:51.277727 4840 generic.go:334] "Generic (PLEG): container finished" podID="7ef68ef3-0dac-4194-9d02-c0156a923d76" containerID="0d8e96cf2fecf8698f15722ba2682b70c7ea72c660acb477786a5ed4452f3ce9" exitCode=0 Dec 05 15:04:51 crc kubenswrapper[4840]: I1205 15:04:51.277830 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-25nwq" event={"ID":"7ef68ef3-0dac-4194-9d02-c0156a923d76","Type":"ContainerDied","Data":"0d8e96cf2fecf8698f15722ba2682b70c7ea72c660acb477786a5ed4452f3ce9"} Dec 05 15:04:51 crc kubenswrapper[4840]: I1205 15:04:51.277857 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-25nwq" event={"ID":"7ef68ef3-0dac-4194-9d02-c0156a923d76","Type":"ContainerStarted","Data":"b91ec3cad7677d0351063537e0d4e56382d67d32d77623aedeab7e986e1da40f"} Dec 05 15:04:51 crc kubenswrapper[4840]: I1205 15:04:51.280973 4840 generic.go:334] "Generic (PLEG): container finished" podID="df7e1030-5127-493c-ab1a-a8f8ce069e83" containerID="ae5a39222b2503a505c76060889d168da2c87a679f9c593e3112fbd1b8184b5b" exitCode=0 Dec 05 15:04:51 crc kubenswrapper[4840]: I1205 15:04:51.281374 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hmzgl" event={"ID":"df7e1030-5127-493c-ab1a-a8f8ce069e83","Type":"ContainerDied","Data":"ae5a39222b2503a505c76060889d168da2c87a679f9c593e3112fbd1b8184b5b"} Dec 05 15:04:51 crc kubenswrapper[4840]: I1205 15:04:51.281405 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hmzgl" event={"ID":"df7e1030-5127-493c-ab1a-a8f8ce069e83","Type":"ContainerStarted","Data":"44d5e36fe4d4b7c804a5fdf15a997852bd031ca911260cf3b6c4fad59c6efba4"} Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 
15:04:52.176447 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-x6pn9"] Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.179999 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x6pn9" Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.182430 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.183703 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x6pn9"] Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.231764 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d10a78b-740a-45ae-bb58-1d2802630d99-utilities\") pod \"certified-operators-x6pn9\" (UID: \"1d10a78b-740a-45ae-bb58-1d2802630d99\") " pod="openshift-marketplace/certified-operators-x6pn9" Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.231827 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d10a78b-740a-45ae-bb58-1d2802630d99-catalog-content\") pod \"certified-operators-x6pn9\" (UID: \"1d10a78b-740a-45ae-bb58-1d2802630d99\") " pod="openshift-marketplace/certified-operators-x6pn9" Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.231852 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zx6c\" (UniqueName: \"kubernetes.io/projected/1d10a78b-740a-45ae-bb58-1d2802630d99-kube-api-access-2zx6c\") pod \"certified-operators-x6pn9\" (UID: \"1d10a78b-740a-45ae-bb58-1d2802630d99\") " pod="openshift-marketplace/certified-operators-x6pn9" Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.333544 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d10a78b-740a-45ae-bb58-1d2802630d99-utilities\") pod \"certified-operators-x6pn9\" (UID: \"1d10a78b-740a-45ae-bb58-1d2802630d99\") " pod="openshift-marketplace/certified-operators-x6pn9" Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.333643 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d10a78b-740a-45ae-bb58-1d2802630d99-catalog-content\") pod \"certified-operators-x6pn9\" (UID: \"1d10a78b-740a-45ae-bb58-1d2802630d99\") " pod="openshift-marketplace/certified-operators-x6pn9" Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.333695 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zx6c\" (UniqueName: \"kubernetes.io/projected/1d10a78b-740a-45ae-bb58-1d2802630d99-kube-api-access-2zx6c\") pod \"certified-operators-x6pn9\" (UID: \"1d10a78b-740a-45ae-bb58-1d2802630d99\") " pod="openshift-marketplace/certified-operators-x6pn9" Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.334033 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d10a78b-740a-45ae-bb58-1d2802630d99-utilities\") pod \"certified-operators-x6pn9\" (UID: \"1d10a78b-740a-45ae-bb58-1d2802630d99\") " pod="openshift-marketplace/certified-operators-x6pn9" Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 
15:04:52.334092 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d10a78b-740a-45ae-bb58-1d2802630d99-catalog-content\") pod \"certified-operators-x6pn9\" (UID: \"1d10a78b-740a-45ae-bb58-1d2802630d99\") " pod="openshift-marketplace/certified-operators-x6pn9" Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.365013 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zx6c\" (UniqueName: \"kubernetes.io/projected/1d10a78b-740a-45ae-bb58-1d2802630d99-kube-api-access-2zx6c\") pod \"certified-operators-x6pn9\" (UID: \"1d10a78b-740a-45ae-bb58-1d2802630d99\") " pod="openshift-marketplace/certified-operators-x6pn9" Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.378008 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cfp8t"] Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.380941 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cfp8t" Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.383226 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.390975 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cfp8t"] Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.434896 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bafc2f1e-3440-4f41-a248-dbc7d322249a-catalog-content\") pod \"community-operators-cfp8t\" (UID: \"bafc2f1e-3440-4f41-a248-dbc7d322249a\") " pod="openshift-marketplace/community-operators-cfp8t" Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.434964 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bafc2f1e-3440-4f41-a248-dbc7d322249a-utilities\") pod \"community-operators-cfp8t\" (UID: \"bafc2f1e-3440-4f41-a248-dbc7d322249a\") " pod="openshift-marketplace/community-operators-cfp8t" Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.435166 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2vb2\" (UniqueName: \"kubernetes.io/projected/bafc2f1e-3440-4f41-a248-dbc7d322249a-kube-api-access-f2vb2\") pod \"community-operators-cfp8t\" (UID: \"bafc2f1e-3440-4f41-a248-dbc7d322249a\") " pod="openshift-marketplace/community-operators-cfp8t" Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.536413 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bafc2f1e-3440-4f41-a248-dbc7d322249a-utilities\") pod \"community-operators-cfp8t\" (UID: \"bafc2f1e-3440-4f41-a248-dbc7d322249a\") " pod="openshift-marketplace/community-operators-cfp8t" Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.536735 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2vb2\" (UniqueName: \"kubernetes.io/projected/bafc2f1e-3440-4f41-a248-dbc7d322249a-kube-api-access-f2vb2\") pod \"community-operators-cfp8t\" (UID: \"bafc2f1e-3440-4f41-a248-dbc7d322249a\") " pod="openshift-marketplace/community-operators-cfp8t" Dec 05 15:04:52 crc kubenswrapper[4840]: 
I1205 15:04:52.536906 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x6pn9" Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.537006 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bafc2f1e-3440-4f41-a248-dbc7d322249a-utilities\") pod \"community-operators-cfp8t\" (UID: \"bafc2f1e-3440-4f41-a248-dbc7d322249a\") " pod="openshift-marketplace/community-operators-cfp8t" Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.537269 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bafc2f1e-3440-4f41-a248-dbc7d322249a-catalog-content\") pod \"community-operators-cfp8t\" (UID: \"bafc2f1e-3440-4f41-a248-dbc7d322249a\") " pod="openshift-marketplace/community-operators-cfp8t" Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.537030 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bafc2f1e-3440-4f41-a248-dbc7d322249a-catalog-content\") pod \"community-operators-cfp8t\" (UID: \"bafc2f1e-3440-4f41-a248-dbc7d322249a\") " pod="openshift-marketplace/community-operators-cfp8t" Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.555452 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2vb2\" (UniqueName: \"kubernetes.io/projected/bafc2f1e-3440-4f41-a248-dbc7d322249a-kube-api-access-f2vb2\") pod \"community-operators-cfp8t\" (UID: \"bafc2f1e-3440-4f41-a248-dbc7d322249a\") " pod="openshift-marketplace/community-operators-cfp8t" Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.717200 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cfp8t" Dec 05 15:04:52 crc kubenswrapper[4840]: I1205 15:04:52.925993 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x6pn9"] Dec 05 15:04:52 crc kubenswrapper[4840]: W1205 15:04:52.943130 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1d10a78b_740a_45ae_bb58_1d2802630d99.slice/crio-cad8c8c3fb7f0b7a36733b0453e17c55692bc5207b361c25c3b2c7d401b166bc WatchSource:0}: Error finding container cad8c8c3fb7f0b7a36733b0453e17c55692bc5207b361c25c3b2c7d401b166bc: Status 404 returned error can't find the container with id cad8c8c3fb7f0b7a36733b0453e17c55692bc5207b361c25c3b2c7d401b166bc Dec 05 15:04:53 crc kubenswrapper[4840]: I1205 15:04:53.138746 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cfp8t"] Dec 05 15:04:53 crc kubenswrapper[4840]: W1205 15:04:53.154177 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbafc2f1e_3440_4f41_a248_dbc7d322249a.slice/crio-6b2fc392771c158563f6517bf61262172dceca83cb740e51a28efdee29a0a78c WatchSource:0}: Error finding container 6b2fc392771c158563f6517bf61262172dceca83cb740e51a28efdee29a0a78c: Status 404 returned error can't find the container with id 6b2fc392771c158563f6517bf61262172dceca83cb740e51a28efdee29a0a78c Dec 05 15:04:53 crc kubenswrapper[4840]: I1205 15:04:53.291695 4840 generic.go:334] "Generic (PLEG): container finished" podID="df7e1030-5127-493c-ab1a-a8f8ce069e83" containerID="90ccc15369691e50ed1059498beac37bb4c298328db64c84fcb6cad6374e4bb7" exitCode=0 Dec 05 15:04:53 crc kubenswrapper[4840]: I1205 15:04:53.291731 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hmzgl" event={"ID":"df7e1030-5127-493c-ab1a-a8f8ce069e83","Type":"ContainerDied","Data":"90ccc15369691e50ed1059498beac37bb4c298328db64c84fcb6cad6374e4bb7"} Dec 05 15:04:53 crc kubenswrapper[4840]: I1205 15:04:53.293162 4840 generic.go:334] "Generic (PLEG): container finished" podID="1d10a78b-740a-45ae-bb58-1d2802630d99" containerID="826c58663e5a467af8dbc89a696f4e649e9e7b4847bcccd592b4ea1b3ec67dfc" exitCode=0 Dec 05 15:04:53 crc kubenswrapper[4840]: I1205 15:04:53.293202 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6pn9" event={"ID":"1d10a78b-740a-45ae-bb58-1d2802630d99","Type":"ContainerDied","Data":"826c58663e5a467af8dbc89a696f4e649e9e7b4847bcccd592b4ea1b3ec67dfc"} Dec 05 15:04:53 crc kubenswrapper[4840]: I1205 15:04:53.293220 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6pn9" event={"ID":"1d10a78b-740a-45ae-bb58-1d2802630d99","Type":"ContainerStarted","Data":"cad8c8c3fb7f0b7a36733b0453e17c55692bc5207b361c25c3b2c7d401b166bc"} Dec 05 15:04:53 crc kubenswrapper[4840]: I1205 15:04:53.295821 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cfp8t" event={"ID":"bafc2f1e-3440-4f41-a248-dbc7d322249a","Type":"ContainerStarted","Data":"93556c8acb0109fd41b257c8d3199a7e1707ec37c4bc8ad7c5cf706920225130"} Dec 05 15:04:53 crc kubenswrapper[4840]: I1205 15:04:53.295849 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cfp8t" 
event={"ID":"bafc2f1e-3440-4f41-a248-dbc7d322249a","Type":"ContainerStarted","Data":"6b2fc392771c158563f6517bf61262172dceca83cb740e51a28efdee29a0a78c"} Dec 05 15:04:53 crc kubenswrapper[4840]: I1205 15:04:53.299600 4840 generic.go:334] "Generic (PLEG): container finished" podID="7ef68ef3-0dac-4194-9d02-c0156a923d76" containerID="41bf50d5d37cff0757920713f7efd9b36f1645e4c73f296896d06bb4deab9d34" exitCode=0 Dec 05 15:04:53 crc kubenswrapper[4840]: I1205 15:04:53.299640 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-25nwq" event={"ID":"7ef68ef3-0dac-4194-9d02-c0156a923d76","Type":"ContainerDied","Data":"41bf50d5d37cff0757920713f7efd9b36f1645e4c73f296896d06bb4deab9d34"} Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.306647 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hmzgl" event={"ID":"df7e1030-5127-493c-ab1a-a8f8ce069e83","Type":"ContainerStarted","Data":"be2414039a111f9b68cb77acd4316c7d65672af01e7664b180530d38ae3f30f0"} Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.308369 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6pn9" event={"ID":"1d10a78b-740a-45ae-bb58-1d2802630d99","Type":"ContainerStarted","Data":"7dc1e68fb05e06b82d7e588a8aa3e6cf94b7020f829629cb18fd89d09d420dc6"} Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.310802 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-25nwq" event={"ID":"7ef68ef3-0dac-4194-9d02-c0156a923d76","Type":"ContainerStarted","Data":"9fd66371deb8ffd1634166820fa139d8beedbb7dcc48b329977fd3c61513bb36"} Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.312173 4840 generic.go:334] "Generic (PLEG): container finished" podID="bafc2f1e-3440-4f41-a248-dbc7d322249a" containerID="93556c8acb0109fd41b257c8d3199a7e1707ec37c4bc8ad7c5cf706920225130" exitCode=0 Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.312194 4840 generic.go:334] "Generic (PLEG): container finished" podID="bafc2f1e-3440-4f41-a248-dbc7d322249a" containerID="ee15c21f8faabfa652c302e409f46e00158ea9847056bc77ebaaa86eef0e4eae" exitCode=0 Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.312211 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cfp8t" event={"ID":"bafc2f1e-3440-4f41-a248-dbc7d322249a","Type":"ContainerDied","Data":"93556c8acb0109fd41b257c8d3199a7e1707ec37c4bc8ad7c5cf706920225130"} Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.312226 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cfp8t" event={"ID":"bafc2f1e-3440-4f41-a248-dbc7d322249a","Type":"ContainerDied","Data":"ee15c21f8faabfa652c302e409f46e00158ea9847056bc77ebaaa86eef0e4eae"} Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.327571 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hmzgl" podStartSLOduration=2.873810214 podStartE2EDuration="5.32753997s" podCreationTimestamp="2025-12-05 15:04:49 +0000 UTC" firstStartedPulling="2025-12-05 15:04:51.283229391 +0000 UTC m=+369.624292005" lastFinishedPulling="2025-12-05 15:04:53.736959147 +0000 UTC m=+372.078021761" observedRunningTime="2025-12-05 15:04:54.321787977 +0000 UTC m=+372.662850601" watchObservedRunningTime="2025-12-05 15:04:54.32753997 +0000 UTC m=+372.668602594" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.362432 4840 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-25nwq" podStartSLOduration=2.800483114 podStartE2EDuration="5.362409879s" podCreationTimestamp="2025-12-05 15:04:49 +0000 UTC" firstStartedPulling="2025-12-05 15:04:51.281211574 +0000 UTC m=+369.622274228" lastFinishedPulling="2025-12-05 15:04:53.843138379 +0000 UTC m=+372.184200993" observedRunningTime="2025-12-05 15:04:54.357343836 +0000 UTC m=+372.698406450" watchObservedRunningTime="2025-12-05 15:04:54.362409879 +0000 UTC m=+372.703472493" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.555187 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-lkplj"] Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.556360 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.566696 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-lkplj"] Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.679812 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bd2926c2-0f89-4883-a631-f2c15348ec39-registry-certificates\") pod \"image-registry-66df7c8f76-lkplj\" (UID: \"bd2926c2-0f89-4883-a631-f2c15348ec39\") " pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.679856 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5w6v\" (UniqueName: \"kubernetes.io/projected/bd2926c2-0f89-4883-a631-f2c15348ec39-kube-api-access-w5w6v\") pod \"image-registry-66df7c8f76-lkplj\" (UID: \"bd2926c2-0f89-4883-a631-f2c15348ec39\") " pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.679923 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-lkplj\" (UID: \"bd2926c2-0f89-4883-a631-f2c15348ec39\") " pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.680060 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bd2926c2-0f89-4883-a631-f2c15348ec39-installation-pull-secrets\") pod \"image-registry-66df7c8f76-lkplj\" (UID: \"bd2926c2-0f89-4883-a631-f2c15348ec39\") " pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.680087 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bd2926c2-0f89-4883-a631-f2c15348ec39-bound-sa-token\") pod \"image-registry-66df7c8f76-lkplj\" (UID: \"bd2926c2-0f89-4883-a631-f2c15348ec39\") " pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.680119 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/bd2926c2-0f89-4883-a631-f2c15348ec39-trusted-ca\") pod \"image-registry-66df7c8f76-lkplj\" (UID: \"bd2926c2-0f89-4883-a631-f2c15348ec39\") " pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.680142 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bd2926c2-0f89-4883-a631-f2c15348ec39-registry-tls\") pod \"image-registry-66df7c8f76-lkplj\" (UID: \"bd2926c2-0f89-4883-a631-f2c15348ec39\") " pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.680159 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bd2926c2-0f89-4883-a631-f2c15348ec39-ca-trust-extracted\") pod \"image-registry-66df7c8f76-lkplj\" (UID: \"bd2926c2-0f89-4883-a631-f2c15348ec39\") " pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.710294 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-lkplj\" (UID: \"bd2926c2-0f89-4883-a631-f2c15348ec39\") " pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.781336 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bd2926c2-0f89-4883-a631-f2c15348ec39-installation-pull-secrets\") pod \"image-registry-66df7c8f76-lkplj\" (UID: \"bd2926c2-0f89-4883-a631-f2c15348ec39\") " pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.781387 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bd2926c2-0f89-4883-a631-f2c15348ec39-bound-sa-token\") pod \"image-registry-66df7c8f76-lkplj\" (UID: \"bd2926c2-0f89-4883-a631-f2c15348ec39\") " pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.781432 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bd2926c2-0f89-4883-a631-f2c15348ec39-trusted-ca\") pod \"image-registry-66df7c8f76-lkplj\" (UID: \"bd2926c2-0f89-4883-a631-f2c15348ec39\") " pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.781464 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bd2926c2-0f89-4883-a631-f2c15348ec39-registry-tls\") pod \"image-registry-66df7c8f76-lkplj\" (UID: \"bd2926c2-0f89-4883-a631-f2c15348ec39\") " pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.781492 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bd2926c2-0f89-4883-a631-f2c15348ec39-ca-trust-extracted\") pod \"image-registry-66df7c8f76-lkplj\" (UID: \"bd2926c2-0f89-4883-a631-f2c15348ec39\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.781514 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bd2926c2-0f89-4883-a631-f2c15348ec39-registry-certificates\") pod \"image-registry-66df7c8f76-lkplj\" (UID: \"bd2926c2-0f89-4883-a631-f2c15348ec39\") " pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.781536 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5w6v\" (UniqueName: \"kubernetes.io/projected/bd2926c2-0f89-4883-a631-f2c15348ec39-kube-api-access-w5w6v\") pod \"image-registry-66df7c8f76-lkplj\" (UID: \"bd2926c2-0f89-4883-a631-f2c15348ec39\") " pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.785580 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bd2926c2-0f89-4883-a631-f2c15348ec39-ca-trust-extracted\") pod \"image-registry-66df7c8f76-lkplj\" (UID: \"bd2926c2-0f89-4883-a631-f2c15348ec39\") " pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.785804 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bd2926c2-0f89-4883-a631-f2c15348ec39-registry-certificates\") pod \"image-registry-66df7c8f76-lkplj\" (UID: \"bd2926c2-0f89-4883-a631-f2c15348ec39\") " pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.787099 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bd2926c2-0f89-4883-a631-f2c15348ec39-trusted-ca\") pod \"image-registry-66df7c8f76-lkplj\" (UID: \"bd2926c2-0f89-4883-a631-f2c15348ec39\") " pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.789137 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bd2926c2-0f89-4883-a631-f2c15348ec39-registry-tls\") pod \"image-registry-66df7c8f76-lkplj\" (UID: \"bd2926c2-0f89-4883-a631-f2c15348ec39\") " pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.789680 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bd2926c2-0f89-4883-a631-f2c15348ec39-installation-pull-secrets\") pod \"image-registry-66df7c8f76-lkplj\" (UID: \"bd2926c2-0f89-4883-a631-f2c15348ec39\") " pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.799246 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bd2926c2-0f89-4883-a631-f2c15348ec39-bound-sa-token\") pod \"image-registry-66df7c8f76-lkplj\" (UID: \"bd2926c2-0f89-4883-a631-f2c15348ec39\") " pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.799385 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5w6v\" (UniqueName: 
\"kubernetes.io/projected/bd2926c2-0f89-4883-a631-f2c15348ec39-kube-api-access-w5w6v\") pod \"image-registry-66df7c8f76-lkplj\" (UID: \"bd2926c2-0f89-4883-a631-f2c15348ec39\") " pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:54 crc kubenswrapper[4840]: I1205 15:04:54.878318 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:55 crc kubenswrapper[4840]: I1205 15:04:55.306210 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-lkplj"] Dec 05 15:04:55 crc kubenswrapper[4840]: W1205 15:04:55.312478 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbd2926c2_0f89_4883_a631_f2c15348ec39.slice/crio-8b9a251d0579da2aa7168e77aa1abd110853a278aa01d10c54bbce7211a60c3b WatchSource:0}: Error finding container 8b9a251d0579da2aa7168e77aa1abd110853a278aa01d10c54bbce7211a60c3b: Status 404 returned error can't find the container with id 8b9a251d0579da2aa7168e77aa1abd110853a278aa01d10c54bbce7211a60c3b Dec 05 15:04:55 crc kubenswrapper[4840]: I1205 15:04:55.318402 4840 generic.go:334] "Generic (PLEG): container finished" podID="1d10a78b-740a-45ae-bb58-1d2802630d99" containerID="7dc1e68fb05e06b82d7e588a8aa3e6cf94b7020f829629cb18fd89d09d420dc6" exitCode=0 Dec 05 15:04:55 crc kubenswrapper[4840]: I1205 15:04:55.318474 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6pn9" event={"ID":"1d10a78b-740a-45ae-bb58-1d2802630d99","Type":"ContainerDied","Data":"7dc1e68fb05e06b82d7e588a8aa3e6cf94b7020f829629cb18fd89d09d420dc6"} Dec 05 15:04:55 crc kubenswrapper[4840]: I1205 15:04:55.323102 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cfp8t" event={"ID":"bafc2f1e-3440-4f41-a248-dbc7d322249a","Type":"ContainerStarted","Data":"685b2d7ec1f73d8577aec023b80d6665ad6a3f995c98a8d5f24ef18c3af0db38"} Dec 05 15:04:56 crc kubenswrapper[4840]: I1205 15:04:56.331300 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6pn9" event={"ID":"1d10a78b-740a-45ae-bb58-1d2802630d99","Type":"ContainerStarted","Data":"c619b935eb01018aa196da3fba59ac77fad5640f1e1b9e28a64eab087d817020"} Dec 05 15:04:56 crc kubenswrapper[4840]: I1205 15:04:56.334979 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" event={"ID":"bd2926c2-0f89-4883-a631-f2c15348ec39","Type":"ContainerStarted","Data":"ef2ce2a5d747e261a9f6b15655807292d2f07c477768fbd3074efb64994d71f3"} Dec 05 15:04:56 crc kubenswrapper[4840]: I1205 15:04:56.335036 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" event={"ID":"bd2926c2-0f89-4883-a631-f2c15348ec39","Type":"ContainerStarted","Data":"8b9a251d0579da2aa7168e77aa1abd110853a278aa01d10c54bbce7211a60c3b"} Dec 05 15:04:56 crc kubenswrapper[4840]: I1205 15:04:56.335159 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:04:56 crc kubenswrapper[4840]: I1205 15:04:56.362085 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cfp8t" podStartSLOduration=2.94901671 podStartE2EDuration="4.362067915s" podCreationTimestamp="2025-12-05 15:04:52 +0000 
UTC" firstStartedPulling="2025-12-05 15:04:53.297212272 +0000 UTC m=+371.638274886" lastFinishedPulling="2025-12-05 15:04:54.710263487 +0000 UTC m=+373.051326091" observedRunningTime="2025-12-05 15:04:55.370362993 +0000 UTC m=+373.711425627" watchObservedRunningTime="2025-12-05 15:04:56.362067915 +0000 UTC m=+374.703130529" Dec 05 15:04:56 crc kubenswrapper[4840]: I1205 15:04:56.364094 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-x6pn9" podStartSLOduration=1.9322641969999999 podStartE2EDuration="4.364087572s" podCreationTimestamp="2025-12-05 15:04:52 +0000 UTC" firstStartedPulling="2025-12-05 15:04:53.295609927 +0000 UTC m=+371.636672541" lastFinishedPulling="2025-12-05 15:04:55.727433302 +0000 UTC m=+374.068495916" observedRunningTime="2025-12-05 15:04:56.357533467 +0000 UTC m=+374.698596081" watchObservedRunningTime="2025-12-05 15:04:56.364087572 +0000 UTC m=+374.705150186" Dec 05 15:04:56 crc kubenswrapper[4840]: I1205 15:04:56.390658 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" podStartSLOduration=2.390642756 podStartE2EDuration="2.390642756s" podCreationTimestamp="2025-12-05 15:04:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:04:56.386788217 +0000 UTC m=+374.727850851" watchObservedRunningTime="2025-12-05 15:04:56.390642756 +0000 UTC m=+374.731705370" Dec 05 15:05:00 crc kubenswrapper[4840]: I1205 15:05:00.098660 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hmzgl" Dec 05 15:05:00 crc kubenswrapper[4840]: I1205 15:05:00.099060 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hmzgl" Dec 05 15:05:00 crc kubenswrapper[4840]: I1205 15:05:00.157254 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hmzgl" Dec 05 15:05:00 crc kubenswrapper[4840]: I1205 15:05:00.297273 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-25nwq" Dec 05 15:05:00 crc kubenswrapper[4840]: I1205 15:05:00.297354 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-25nwq" Dec 05 15:05:00 crc kubenswrapper[4840]: I1205 15:05:00.346600 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-25nwq" Dec 05 15:05:00 crc kubenswrapper[4840]: I1205 15:05:00.402187 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hmzgl" Dec 05 15:05:00 crc kubenswrapper[4840]: I1205 15:05:00.414715 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-25nwq" Dec 05 15:05:02 crc kubenswrapper[4840]: I1205 15:05:02.537226 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-x6pn9" Dec 05 15:05:02 crc kubenswrapper[4840]: I1205 15:05:02.537286 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-x6pn9" Dec 05 15:05:02 crc kubenswrapper[4840]: I1205 15:05:02.576908 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openshift-marketplace/certified-operators-x6pn9" Dec 05 15:05:02 crc kubenswrapper[4840]: I1205 15:05:02.718199 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cfp8t" Dec 05 15:05:02 crc kubenswrapper[4840]: I1205 15:05:02.733191 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cfp8t" Dec 05 15:05:02 crc kubenswrapper[4840]: I1205 15:05:02.777628 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cfp8t" Dec 05 15:05:03 crc kubenswrapper[4840]: I1205 15:05:03.411025 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-x6pn9" Dec 05 15:05:03 crc kubenswrapper[4840]: I1205 15:05:03.416691 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-cfp8t" Dec 05 15:05:14 crc kubenswrapper[4840]: I1205 15:05:14.891000 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-lkplj" Dec 05 15:05:14 crc kubenswrapper[4840]: I1205 15:05:14.967897 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xsrwv"] Dec 05 15:05:19 crc kubenswrapper[4840]: I1205 15:05:19.471986 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:05:19 crc kubenswrapper[4840]: I1205 15:05:19.472356 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.010151 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" podUID="71224ef1-9751-49f0-89d6-18b5225f97cb" containerName="registry" containerID="cri-o://b228830e1db6932884a46b590f60d89e53e11dc44b41dfc8d7d80c9a70c40c0f" gracePeriod=30 Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.382511 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.395624 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/71224ef1-9751-49f0-89d6-18b5225f97cb-registry-certificates\") pod \"71224ef1-9751-49f0-89d6-18b5225f97cb\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.396125 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/71224ef1-9751-49f0-89d6-18b5225f97cb-installation-pull-secrets\") pod \"71224ef1-9751-49f0-89d6-18b5225f97cb\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.396186 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6whxk\" (UniqueName: \"kubernetes.io/projected/71224ef1-9751-49f0-89d6-18b5225f97cb-kube-api-access-6whxk\") pod \"71224ef1-9751-49f0-89d6-18b5225f97cb\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.396260 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/71224ef1-9751-49f0-89d6-18b5225f97cb-ca-trust-extracted\") pod \"71224ef1-9751-49f0-89d6-18b5225f97cb\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.396301 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/71224ef1-9751-49f0-89d6-18b5225f97cb-trusted-ca\") pod \"71224ef1-9751-49f0-89d6-18b5225f97cb\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.396476 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"71224ef1-9751-49f0-89d6-18b5225f97cb\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.396567 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/71224ef1-9751-49f0-89d6-18b5225f97cb-bound-sa-token\") pod \"71224ef1-9751-49f0-89d6-18b5225f97cb\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.396629 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/71224ef1-9751-49f0-89d6-18b5225f97cb-registry-tls\") pod \"71224ef1-9751-49f0-89d6-18b5225f97cb\" (UID: \"71224ef1-9751-49f0-89d6-18b5225f97cb\") " Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.397295 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71224ef1-9751-49f0-89d6-18b5225f97cb-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "71224ef1-9751-49f0-89d6-18b5225f97cb" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.402077 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71224ef1-9751-49f0-89d6-18b5225f97cb-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "71224ef1-9751-49f0-89d6-18b5225f97cb" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.405842 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71224ef1-9751-49f0-89d6-18b5225f97cb-kube-api-access-6whxk" (OuterVolumeSpecName: "kube-api-access-6whxk") pod "71224ef1-9751-49f0-89d6-18b5225f97cb" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb"). InnerVolumeSpecName "kube-api-access-6whxk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.407686 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71224ef1-9751-49f0-89d6-18b5225f97cb-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "71224ef1-9751-49f0-89d6-18b5225f97cb" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.408943 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71224ef1-9751-49f0-89d6-18b5225f97cb-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "71224ef1-9751-49f0-89d6-18b5225f97cb" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.409964 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71224ef1-9751-49f0-89d6-18b5225f97cb-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "71224ef1-9751-49f0-89d6-18b5225f97cb" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.420451 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71224ef1-9751-49f0-89d6-18b5225f97cb-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "71224ef1-9751-49f0-89d6-18b5225f97cb" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.437369 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "71224ef1-9751-49f0-89d6-18b5225f97cb" (UID: "71224ef1-9751-49f0-89d6-18b5225f97cb"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.498491 4840 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/71224ef1-9751-49f0-89d6-18b5225f97cb-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.498534 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6whxk\" (UniqueName: \"kubernetes.io/projected/71224ef1-9751-49f0-89d6-18b5225f97cb-kube-api-access-6whxk\") on node \"crc\" DevicePath \"\"" Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.498547 4840 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/71224ef1-9751-49f0-89d6-18b5225f97cb-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.498560 4840 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/71224ef1-9751-49f0-89d6-18b5225f97cb-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.498573 4840 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/71224ef1-9751-49f0-89d6-18b5225f97cb-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.498584 4840 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/71224ef1-9751-49f0-89d6-18b5225f97cb-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.498594 4840 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/71224ef1-9751-49f0-89d6-18b5225f97cb-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.618828 4840 generic.go:334] "Generic (PLEG): container finished" podID="71224ef1-9751-49f0-89d6-18b5225f97cb" containerID="b228830e1db6932884a46b590f60d89e53e11dc44b41dfc8d7d80c9a70c40c0f" exitCode=0 Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.618884 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" event={"ID":"71224ef1-9751-49f0-89d6-18b5225f97cb","Type":"ContainerDied","Data":"b228830e1db6932884a46b590f60d89e53e11dc44b41dfc8d7d80c9a70c40c0f"} Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.618910 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" event={"ID":"71224ef1-9751-49f0-89d6-18b5225f97cb","Type":"ContainerDied","Data":"43d6bc68f8980ff493e58e822c4b1079bf9718b91cb6a17f0210692a4ed87837"} Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.618919 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xsrwv" Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.618925 4840 scope.go:117] "RemoveContainer" containerID="b228830e1db6932884a46b590f60d89e53e11dc44b41dfc8d7d80c9a70c40c0f" Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.640167 4840 scope.go:117] "RemoveContainer" containerID="b228830e1db6932884a46b590f60d89e53e11dc44b41dfc8d7d80c9a70c40c0f" Dec 05 15:05:40 crc kubenswrapper[4840]: E1205 15:05:40.640717 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b228830e1db6932884a46b590f60d89e53e11dc44b41dfc8d7d80c9a70c40c0f\": container with ID starting with b228830e1db6932884a46b590f60d89e53e11dc44b41dfc8d7d80c9a70c40c0f not found: ID does not exist" containerID="b228830e1db6932884a46b590f60d89e53e11dc44b41dfc8d7d80c9a70c40c0f" Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.641119 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b228830e1db6932884a46b590f60d89e53e11dc44b41dfc8d7d80c9a70c40c0f"} err="failed to get container status \"b228830e1db6932884a46b590f60d89e53e11dc44b41dfc8d7d80c9a70c40c0f\": rpc error: code = NotFound desc = could not find container \"b228830e1db6932884a46b590f60d89e53e11dc44b41dfc8d7d80c9a70c40c0f\": container with ID starting with b228830e1db6932884a46b590f60d89e53e11dc44b41dfc8d7d80c9a70c40c0f not found: ID does not exist" Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.657431 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xsrwv"] Dec 05 15:05:40 crc kubenswrapper[4840]: I1205 15:05:40.664710 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xsrwv"] Dec 05 15:05:42 crc kubenswrapper[4840]: I1205 15:05:42.075347 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71224ef1-9751-49f0-89d6-18b5225f97cb" path="/var/lib/kubelet/pods/71224ef1-9751-49f0-89d6-18b5225f97cb/volumes" Dec 05 15:05:49 crc kubenswrapper[4840]: I1205 15:05:49.472000 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:05:49 crc kubenswrapper[4840]: I1205 15:05:49.472394 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:05:49 crc kubenswrapper[4840]: I1205 15:05:49.472497 4840 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" Dec 05 15:05:49 crc kubenswrapper[4840]: I1205 15:05:49.473312 4840 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"94a41c63939146ef314f1a8ae64ad21aa7f707bfd188ab51b86cffd43bc910f9"} pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 15:05:49 crc kubenswrapper[4840]: I1205 
15:05:49.473399 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" containerID="cri-o://94a41c63939146ef314f1a8ae64ad21aa7f707bfd188ab51b86cffd43bc910f9" gracePeriod=600 Dec 05 15:05:49 crc kubenswrapper[4840]: I1205 15:05:49.671198 4840 generic.go:334] "Generic (PLEG): container finished" podID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerID="94a41c63939146ef314f1a8ae64ad21aa7f707bfd188ab51b86cffd43bc910f9" exitCode=0 Dec 05 15:05:49 crc kubenswrapper[4840]: I1205 15:05:49.671251 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerDied","Data":"94a41c63939146ef314f1a8ae64ad21aa7f707bfd188ab51b86cffd43bc910f9"} Dec 05 15:05:49 crc kubenswrapper[4840]: I1205 15:05:49.671293 4840 scope.go:117] "RemoveContainer" containerID="e6ee787ad610714b9ffde58c2a77a956a8e3e6e0ea3e589a8205addc54210705" Dec 05 15:05:50 crc kubenswrapper[4840]: I1205 15:05:50.680652 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerStarted","Data":"5fb60fdb5ee2de7da83573520684a8ffa3aaf560d6a723c6162e48846e14816d"} Dec 05 15:07:49 crc kubenswrapper[4840]: I1205 15:07:49.472410 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:07:49 crc kubenswrapper[4840]: I1205 15:07:49.473168 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:08:19 crc kubenswrapper[4840]: I1205 15:08:19.472744 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:08:19 crc kubenswrapper[4840]: I1205 15:08:19.473467 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:08:49 crc kubenswrapper[4840]: I1205 15:08:49.472427 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:08:49 crc kubenswrapper[4840]: I1205 15:08:49.472995 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:08:49 crc kubenswrapper[4840]: I1205 15:08:49.473051 4840 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" Dec 05 15:08:49 crc kubenswrapper[4840]: I1205 15:08:49.473717 4840 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5fb60fdb5ee2de7da83573520684a8ffa3aaf560d6a723c6162e48846e14816d"} pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 15:08:49 crc kubenswrapper[4840]: I1205 15:08:49.473777 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" containerID="cri-o://5fb60fdb5ee2de7da83573520684a8ffa3aaf560d6a723c6162e48846e14816d" gracePeriod=600 Dec 05 15:08:49 crc kubenswrapper[4840]: I1205 15:08:49.878636 4840 generic.go:334] "Generic (PLEG): container finished" podID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerID="5fb60fdb5ee2de7da83573520684a8ffa3aaf560d6a723c6162e48846e14816d" exitCode=0 Dec 05 15:08:49 crc kubenswrapper[4840]: I1205 15:08:49.878727 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerDied","Data":"5fb60fdb5ee2de7da83573520684a8ffa3aaf560d6a723c6162e48846e14816d"} Dec 05 15:08:49 crc kubenswrapper[4840]: I1205 15:08:49.879006 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerStarted","Data":"83830503ae8c68349e9a52130f5a53e1e9c359c7b97c632d824c7d4a08e0e1f1"} Dec 05 15:08:49 crc kubenswrapper[4840]: I1205 15:08:49.879030 4840 scope.go:117] "RemoveContainer" containerID="94a41c63939146ef314f1a8ae64ad21aa7f707bfd188ab51b86cffd43bc910f9" Dec 05 15:09:41 crc kubenswrapper[4840]: I1205 15:09:41.913549 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-n8sd9"] Dec 05 15:09:41 crc kubenswrapper[4840]: E1205 15:09:41.914367 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71224ef1-9751-49f0-89d6-18b5225f97cb" containerName="registry" Dec 05 15:09:41 crc kubenswrapper[4840]: I1205 15:09:41.914380 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="71224ef1-9751-49f0-89d6-18b5225f97cb" containerName="registry" Dec 05 15:09:41 crc kubenswrapper[4840]: I1205 15:09:41.914474 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="71224ef1-9751-49f0-89d6-18b5225f97cb" containerName="registry" Dec 05 15:09:41 crc kubenswrapper[4840]: I1205 15:09:41.914961 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-n8sd9" Dec 05 15:09:41 crc kubenswrapper[4840]: I1205 15:09:41.916958 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-t8hxr"] Dec 05 15:09:41 crc kubenswrapper[4840]: I1205 15:09:41.929432 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 05 15:09:41 crc kubenswrapper[4840]: I1205 15:09:41.930161 4840 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-xcm82" Dec 05 15:09:41 crc kubenswrapper[4840]: I1205 15:09:41.931319 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 05 15:09:41 crc kubenswrapper[4840]: I1205 15:09:41.934255 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-t8hxr" Dec 05 15:09:41 crc kubenswrapper[4840]: I1205 15:09:41.939723 4840 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-pwlxg" Dec 05 15:09:41 crc kubenswrapper[4840]: I1205 15:09:41.950795 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-n8sd9"] Dec 05 15:09:41 crc kubenswrapper[4840]: I1205 15:09:41.955150 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-8nl6w"] Dec 05 15:09:41 crc kubenswrapper[4840]: I1205 15:09:41.955888 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-8nl6w" Dec 05 15:09:41 crc kubenswrapper[4840]: I1205 15:09:41.957613 4840 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-lbvdz" Dec 05 15:09:41 crc kubenswrapper[4840]: I1205 15:09:41.959974 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-t8hxr"] Dec 05 15:09:41 crc kubenswrapper[4840]: I1205 15:09:41.966391 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-8nl6w"] Dec 05 15:09:42 crc kubenswrapper[4840]: I1205 15:09:42.006604 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngwsx\" (UniqueName: \"kubernetes.io/projected/d94bca81-5539-4bc2-bbec-38a88770929d-kube-api-access-ngwsx\") pod \"cert-manager-5b446d88c5-t8hxr\" (UID: \"d94bca81-5539-4bc2-bbec-38a88770929d\") " pod="cert-manager/cert-manager-5b446d88c5-t8hxr" Dec 05 15:09:42 crc kubenswrapper[4840]: I1205 15:09:42.006699 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfjd8\" (UniqueName: \"kubernetes.io/projected/7d96fedc-8d6a-4b34-af3e-58104249edc2-kube-api-access-zfjd8\") pod \"cert-manager-cainjector-7f985d654d-n8sd9\" (UID: \"7d96fedc-8d6a-4b34-af3e-58104249edc2\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-n8sd9" Dec 05 15:09:42 crc kubenswrapper[4840]: I1205 15:09:42.107915 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfjd8\" (UniqueName: \"kubernetes.io/projected/7d96fedc-8d6a-4b34-af3e-58104249edc2-kube-api-access-zfjd8\") pod \"cert-manager-cainjector-7f985d654d-n8sd9\" (UID: \"7d96fedc-8d6a-4b34-af3e-58104249edc2\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-n8sd9" Dec 05 15:09:42 crc 
kubenswrapper[4840]: I1205 15:09:42.108170 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ngwsx\" (UniqueName: \"kubernetes.io/projected/d94bca81-5539-4bc2-bbec-38a88770929d-kube-api-access-ngwsx\") pod \"cert-manager-5b446d88c5-t8hxr\" (UID: \"d94bca81-5539-4bc2-bbec-38a88770929d\") " pod="cert-manager/cert-manager-5b446d88c5-t8hxr" Dec 05 15:09:42 crc kubenswrapper[4840]: I1205 15:09:42.108290 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92jcm\" (UniqueName: \"kubernetes.io/projected/839aa773-117e-423c-853e-37f05ff674a1-kube-api-access-92jcm\") pod \"cert-manager-webhook-5655c58dd6-8nl6w\" (UID: \"839aa773-117e-423c-853e-37f05ff674a1\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-8nl6w" Dec 05 15:09:42 crc kubenswrapper[4840]: I1205 15:09:42.121265 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 05 15:09:42 crc kubenswrapper[4840]: I1205 15:09:42.131356 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 05 15:09:42 crc kubenswrapper[4840]: I1205 15:09:42.146652 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ngwsx\" (UniqueName: \"kubernetes.io/projected/d94bca81-5539-4bc2-bbec-38a88770929d-kube-api-access-ngwsx\") pod \"cert-manager-5b446d88c5-t8hxr\" (UID: \"d94bca81-5539-4bc2-bbec-38a88770929d\") " pod="cert-manager/cert-manager-5b446d88c5-t8hxr" Dec 05 15:09:42 crc kubenswrapper[4840]: I1205 15:09:42.146717 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfjd8\" (UniqueName: \"kubernetes.io/projected/7d96fedc-8d6a-4b34-af3e-58104249edc2-kube-api-access-zfjd8\") pod \"cert-manager-cainjector-7f985d654d-n8sd9\" (UID: \"7d96fedc-8d6a-4b34-af3e-58104249edc2\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-n8sd9" Dec 05 15:09:42 crc kubenswrapper[4840]: I1205 15:09:42.210036 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92jcm\" (UniqueName: \"kubernetes.io/projected/839aa773-117e-423c-853e-37f05ff674a1-kube-api-access-92jcm\") pod \"cert-manager-webhook-5655c58dd6-8nl6w\" (UID: \"839aa773-117e-423c-853e-37f05ff674a1\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-8nl6w" Dec 05 15:09:42 crc kubenswrapper[4840]: I1205 15:09:42.231553 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92jcm\" (UniqueName: \"kubernetes.io/projected/839aa773-117e-423c-853e-37f05ff674a1-kube-api-access-92jcm\") pod \"cert-manager-webhook-5655c58dd6-8nl6w\" (UID: \"839aa773-117e-423c-853e-37f05ff674a1\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-8nl6w" Dec 05 15:09:42 crc kubenswrapper[4840]: I1205 15:09:42.252130 4840 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-xcm82" Dec 05 15:09:42 crc kubenswrapper[4840]: I1205 15:09:42.261400 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-n8sd9" Dec 05 15:09:42 crc kubenswrapper[4840]: I1205 15:09:42.264847 4840 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-pwlxg" Dec 05 15:09:42 crc kubenswrapper[4840]: I1205 15:09:42.274040 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-t8hxr" Dec 05 15:09:42 crc kubenswrapper[4840]: I1205 15:09:42.279871 4840 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-lbvdz" Dec 05 15:09:42 crc kubenswrapper[4840]: I1205 15:09:42.288762 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-8nl6w" Dec 05 15:09:42 crc kubenswrapper[4840]: I1205 15:09:42.675281 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-n8sd9"] Dec 05 15:09:42 crc kubenswrapper[4840]: I1205 15:09:42.682814 4840 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 15:09:42 crc kubenswrapper[4840]: I1205 15:09:42.734474 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-8nl6w"] Dec 05 15:09:42 crc kubenswrapper[4840]: I1205 15:09:42.741130 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-t8hxr"] Dec 05 15:09:42 crc kubenswrapper[4840]: W1205 15:09:42.745594 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd94bca81_5539_4bc2_bbec_38a88770929d.slice/crio-5c8a15bf1bdf4cadb668091e23893ec337a593b9a9a5d5df278edecd5ad0fe99 WatchSource:0}: Error finding container 5c8a15bf1bdf4cadb668091e23893ec337a593b9a9a5d5df278edecd5ad0fe99: Status 404 returned error can't find the container with id 5c8a15bf1bdf4cadb668091e23893ec337a593b9a9a5d5df278edecd5ad0fe99 Dec 05 15:09:43 crc kubenswrapper[4840]: I1205 15:09:43.173466 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-t8hxr" event={"ID":"d94bca81-5539-4bc2-bbec-38a88770929d","Type":"ContainerStarted","Data":"5c8a15bf1bdf4cadb668091e23893ec337a593b9a9a5d5df278edecd5ad0fe99"} Dec 05 15:09:43 crc kubenswrapper[4840]: I1205 15:09:43.175058 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-n8sd9" event={"ID":"7d96fedc-8d6a-4b34-af3e-58104249edc2","Type":"ContainerStarted","Data":"b30853831646a9f9e206d9e018eeb8f149c17a2707d97a94f96867680ea9604e"} Dec 05 15:09:43 crc kubenswrapper[4840]: I1205 15:09:43.176685 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-8nl6w" event={"ID":"839aa773-117e-423c-853e-37f05ff674a1","Type":"ContainerStarted","Data":"59a7c3cc77ac0ae494af09f1155edaaa9689f34b25fcfe25a2be6c487722cdbb"} Dec 05 15:09:47 crc kubenswrapper[4840]: I1205 15:09:47.195105 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-t8hxr" event={"ID":"d94bca81-5539-4bc2-bbec-38a88770929d","Type":"ContainerStarted","Data":"4d4bb2cdfd5fed50127a6ab825795f6176a6352cf0f36554617040d1f01b92c3"} Dec 05 15:09:47 crc kubenswrapper[4840]: I1205 15:09:47.196507 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-n8sd9" event={"ID":"7d96fedc-8d6a-4b34-af3e-58104249edc2","Type":"ContainerStarted","Data":"22c9dca176adfcdadb5d211c2d2e18115083546b30ba64b6126ec6bd959c6c09"} Dec 05 15:09:47 crc kubenswrapper[4840]: I1205 15:09:47.197797 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-8nl6w" 
event={"ID":"839aa773-117e-423c-853e-37f05ff674a1","Type":"ContainerStarted","Data":"e25f830426681736674c3c0abbe00c9a9a4856aac764994cc413c842273a6c00"} Dec 05 15:09:47 crc kubenswrapper[4840]: I1205 15:09:47.197921 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-8nl6w" Dec 05 15:09:47 crc kubenswrapper[4840]: I1205 15:09:47.210274 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-t8hxr" podStartSLOduration=2.229754245 podStartE2EDuration="6.210259646s" podCreationTimestamp="2025-12-05 15:09:41 +0000 UTC" firstStartedPulling="2025-12-05 15:09:42.747782242 +0000 UTC m=+661.088844856" lastFinishedPulling="2025-12-05 15:09:46.728287613 +0000 UTC m=+665.069350257" observedRunningTime="2025-12-05 15:09:47.206937411 +0000 UTC m=+665.548000025" watchObservedRunningTime="2025-12-05 15:09:47.210259646 +0000 UTC m=+665.551322260" Dec 05 15:09:47 crc kubenswrapper[4840]: I1205 15:09:47.221834 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-8nl6w" podStartSLOduration=2.266298329 podStartE2EDuration="6.221816136s" podCreationTimestamp="2025-12-05 15:09:41 +0000 UTC" firstStartedPulling="2025-12-05 15:09:42.745125507 +0000 UTC m=+661.086188121" lastFinishedPulling="2025-12-05 15:09:46.700643324 +0000 UTC m=+665.041705928" observedRunningTime="2025-12-05 15:09:47.219443268 +0000 UTC m=+665.560505882" watchObservedRunningTime="2025-12-05 15:09:47.221816136 +0000 UTC m=+665.562878750" Dec 05 15:09:47 crc kubenswrapper[4840]: I1205 15:09:47.233005 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-n8sd9" podStartSLOduration=2.206899154 podStartE2EDuration="6.232986574s" podCreationTimestamp="2025-12-05 15:09:41 +0000 UTC" firstStartedPulling="2025-12-05 15:09:42.682548682 +0000 UTC m=+661.023611296" lastFinishedPulling="2025-12-05 15:09:46.708636102 +0000 UTC m=+665.049698716" observedRunningTime="2025-12-05 15:09:47.231674527 +0000 UTC m=+665.572737151" watchObservedRunningTime="2025-12-05 15:09:47.232986574 +0000 UTC m=+665.574049188" Dec 05 15:09:52 crc kubenswrapper[4840]: I1205 15:09:52.292741 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-8nl6w" Dec 05 15:09:52 crc kubenswrapper[4840]: I1205 15:09:52.653790 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-czvxk"] Dec 05 15:09:52 crc kubenswrapper[4840]: I1205 15:09:52.654438 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="nbdb" containerID="cri-o://6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585" gracePeriod=30 Dec 05 15:09:52 crc kubenswrapper[4840]: I1205 15:09:52.654513 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5" gracePeriod=30 Dec 05 15:09:52 crc kubenswrapper[4840]: I1205 15:09:52.654693 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" 
podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="northd" containerID="cri-o://152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5" gracePeriod=30 Dec 05 15:09:52 crc kubenswrapper[4840]: I1205 15:09:52.654811 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="sbdb" containerID="cri-o://4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6" gracePeriod=30 Dec 05 15:09:52 crc kubenswrapper[4840]: I1205 15:09:52.655010 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="ovn-acl-logging" containerID="cri-o://ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2" gracePeriod=30 Dec 05 15:09:52 crc kubenswrapper[4840]: I1205 15:09:52.655152 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="ovn-controller" containerID="cri-o://4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c" gracePeriod=30 Dec 05 15:09:52 crc kubenswrapper[4840]: I1205 15:09:52.655198 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="kube-rbac-proxy-node" containerID="cri-o://349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48" gracePeriod=30 Dec 05 15:09:52 crc kubenswrapper[4840]: I1205 15:09:52.691491 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="ovnkube-controller" containerID="cri-o://2144a37938b23a0d2c63e5ce547c2a3b5eb0cc5976c9125c6788c1bbed102368" gracePeriod=30 Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.244209 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-czvxk_e9a432c2-725d-46c6-963e-68a99ba35c89/ovnkube-controller/3.log" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.248000 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-czvxk_e9a432c2-725d-46c6-963e-68a99ba35c89/ovn-acl-logging/0.log" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.249008 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-czvxk_e9a432c2-725d-46c6-963e-68a99ba35c89/ovn-controller/0.log" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.249857 4840 generic.go:334] "Generic (PLEG): container finished" podID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerID="2144a37938b23a0d2c63e5ce547c2a3b5eb0cc5976c9125c6788c1bbed102368" exitCode=0 Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.249921 4840 generic.go:334] "Generic (PLEG): container finished" podID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerID="4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6" exitCode=0 Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.249932 4840 generic.go:334] "Generic (PLEG): container finished" podID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerID="6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585" exitCode=0 Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.249943 4840 generic.go:334] 
"Generic (PLEG): container finished" podID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerID="152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5" exitCode=0 Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.249984 4840 generic.go:334] "Generic (PLEG): container finished" podID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerID="4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5" exitCode=0 Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.249997 4840 generic.go:334] "Generic (PLEG): container finished" podID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerID="349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48" exitCode=0 Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.250008 4840 generic.go:334] "Generic (PLEG): container finished" podID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerID="ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2" exitCode=143 Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.250019 4840 generic.go:334] "Generic (PLEG): container finished" podID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerID="4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c" exitCode=143 Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.250115 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerDied","Data":"2144a37938b23a0d2c63e5ce547c2a3b5eb0cc5976c9125c6788c1bbed102368"} Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.250192 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerDied","Data":"4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6"} Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.250241 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerDied","Data":"6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585"} Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.250259 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerDied","Data":"152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5"} Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.250271 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerDied","Data":"4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5"} Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.250282 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerDied","Data":"349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48"} Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.250295 4840 scope.go:117] "RemoveContainer" containerID="138309b3756a49d9260dc20eccf20c12afdf023af9a1e7ce18c2e3211f84e616" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.250294 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" 
event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerDied","Data":"ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2"} Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.250338 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerDied","Data":"4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c"} Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.253023 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-q8pn7_ffd91a64-4156-418d-8348-1efa3563e904/kube-multus/2.log" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.254293 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-q8pn7_ffd91a64-4156-418d-8348-1efa3563e904/kube-multus/1.log" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.254351 4840 generic.go:334] "Generic (PLEG): container finished" podID="ffd91a64-4156-418d-8348-1efa3563e904" containerID="83f6a1b848d66de940665d1fced2d72d3aad317913ba5ab4c94c1951388c49cb" exitCode=2 Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.254387 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-q8pn7" event={"ID":"ffd91a64-4156-418d-8348-1efa3563e904","Type":"ContainerDied","Data":"83f6a1b848d66de940665d1fced2d72d3aad317913ba5ab4c94c1951388c49cb"} Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.254932 4840 scope.go:117] "RemoveContainer" containerID="83f6a1b848d66de940665d1fced2d72d3aad317913ba5ab4c94c1951388c49cb" Dec 05 15:09:53 crc kubenswrapper[4840]: E1205 15:09:53.255219 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-q8pn7_openshift-multus(ffd91a64-4156-418d-8348-1efa3563e904)\"" pod="openshift-multus/multus-q8pn7" podUID="ffd91a64-4156-418d-8348-1efa3563e904" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.285529 4840 scope.go:117] "RemoveContainer" containerID="ddb0c1f54c32a87c863028965a174607b182b1a1ae7e681045ad724e50e0d7d8" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.327608 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-czvxk_e9a432c2-725d-46c6-963e-68a99ba35c89/ovn-acl-logging/0.log" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.328921 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-czvxk_e9a432c2-725d-46c6-963e-68a99ba35c89/ovn-controller/0.log" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.329359 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.391727 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-rrgp7"] Dec 05 15:09:53 crc kubenswrapper[4840]: E1205 15:09:53.391962 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="ovnkube-controller" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.391974 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="ovnkube-controller" Dec 05 15:09:53 crc kubenswrapper[4840]: E1205 15:09:53.391983 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="kube-rbac-proxy-node" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.391989 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="kube-rbac-proxy-node" Dec 05 15:09:53 crc kubenswrapper[4840]: E1205 15:09:53.392002 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="kubecfg-setup" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.392008 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="kubecfg-setup" Dec 05 15:09:53 crc kubenswrapper[4840]: E1205 15:09:53.392016 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="sbdb" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.392022 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="sbdb" Dec 05 15:09:53 crc kubenswrapper[4840]: E1205 15:09:53.392030 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="ovnkube-controller" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.392035 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="ovnkube-controller" Dec 05 15:09:53 crc kubenswrapper[4840]: E1205 15:09:53.392043 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.392048 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 15:09:53 crc kubenswrapper[4840]: E1205 15:09:53.392054 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="ovn-acl-logging" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.392060 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="ovn-acl-logging" Dec 05 15:09:53 crc kubenswrapper[4840]: E1205 15:09:53.392067 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="northd" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.392074 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="northd" Dec 05 15:09:53 crc kubenswrapper[4840]: E1205 15:09:53.392083 4840 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="nbdb" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.392089 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="nbdb" Dec 05 15:09:53 crc kubenswrapper[4840]: E1205 15:09:53.392098 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="ovn-controller" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.392104 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="ovn-controller" Dec 05 15:09:53 crc kubenswrapper[4840]: E1205 15:09:53.392111 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="ovnkube-controller" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.392116 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="ovnkube-controller" Dec 05 15:09:53 crc kubenswrapper[4840]: E1205 15:09:53.392124 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="ovnkube-controller" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.392130 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="ovnkube-controller" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.392215 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="kube-rbac-proxy-node" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.392226 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="ovnkube-controller" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.392232 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="ovnkube-controller" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.392239 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="nbdb" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.392248 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.392255 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="sbdb" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.392264 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="ovn-controller" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.392271 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="ovn-acl-logging" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.392281 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="ovnkube-controller" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.392287 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="northd" Dec 05 15:09:53 crc kubenswrapper[4840]: E1205 15:09:53.392366 4840 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="ovnkube-controller"
Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.392373 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="ovnkube-controller"
Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.392452 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="ovnkube-controller"
Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.392610 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" containerName="ovnkube-controller"
Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.393896 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7"
Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.471775 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-run-ovn\") pod \"e9a432c2-725d-46c6-963e-68a99ba35c89\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") "
Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.471828 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-run-netns\") pod \"e9a432c2-725d-46c6-963e-68a99ba35c89\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") "
Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.471884 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-node-log\") pod \"e9a432c2-725d-46c6-963e-68a99ba35c89\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") "
Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.471906 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-cni-netd\") pod \"e9a432c2-725d-46c6-963e-68a99ba35c89\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") "
Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.471942 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "e9a432c2-725d-46c6-963e-68a99ba35c89" (UID: "e9a432c2-725d-46c6-963e-68a99ba35c89"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.471959 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-systemd-units\") pod \"e9a432c2-725d-46c6-963e-68a99ba35c89\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") "
Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.471966 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "e9a432c2-725d-46c6-963e-68a99ba35c89" (UID: "e9a432c2-725d-46c6-963e-68a99ba35c89"). InnerVolumeSpecName "host-run-netns".
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.471990 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e9a432c2-725d-46c6-963e-68a99ba35c89-env-overrides\") pod \"e9a432c2-725d-46c6-963e-68a99ba35c89\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472015 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-kubelet\") pod \"e9a432c2-725d-46c6-963e-68a99ba35c89\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.471987 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "e9a432c2-725d-46c6-963e-68a99ba35c89" (UID: "e9a432c2-725d-46c6-963e-68a99ba35c89"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472033 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-run-systemd\") pod \"e9a432c2-725d-46c6-963e-68a99ba35c89\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472018 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "e9a432c2-725d-46c6-963e-68a99ba35c89" (UID: "e9a432c2-725d-46c6-963e-68a99ba35c89"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472011 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-node-log" (OuterVolumeSpecName: "node-log") pod "e9a432c2-725d-46c6-963e-68a99ba35c89" (UID: "e9a432c2-725d-46c6-963e-68a99ba35c89"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472050 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "e9a432c2-725d-46c6-963e-68a99ba35c89" (UID: "e9a432c2-725d-46c6-963e-68a99ba35c89"). InnerVolumeSpecName "host-kubelet". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472059 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e9a432c2-725d-46c6-963e-68a99ba35c89-ovn-node-metrics-cert\") pod \"e9a432c2-725d-46c6-963e-68a99ba35c89\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472162 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-run-openvswitch\") pod \"e9a432c2-725d-46c6-963e-68a99ba35c89\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472184 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-cni-bin\") pod \"e9a432c2-725d-46c6-963e-68a99ba35c89\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472204 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-slash\") pod \"e9a432c2-725d-46c6-963e-68a99ba35c89\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472228 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e9a432c2-725d-46c6-963e-68a99ba35c89-ovnkube-config\") pod \"e9a432c2-725d-46c6-963e-68a99ba35c89\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472246 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-etc-openvswitch\") pod \"e9a432c2-725d-46c6-963e-68a99ba35c89\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472257 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "e9a432c2-725d-46c6-963e-68a99ba35c89" (UID: "e9a432c2-725d-46c6-963e-68a99ba35c89"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472268 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e9a432c2-725d-46c6-963e-68a99ba35c89-ovnkube-script-lib\") pod \"e9a432c2-725d-46c6-963e-68a99ba35c89\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472293 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "e9a432c2-725d-46c6-963e-68a99ba35c89" (UID: "e9a432c2-725d-46c6-963e-68a99ba35c89"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472297 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-run-ovn-kubernetes\") pod \"e9a432c2-725d-46c6-963e-68a99ba35c89\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472348 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "e9a432c2-725d-46c6-963e-68a99ba35c89" (UID: "e9a432c2-725d-46c6-963e-68a99ba35c89"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472364 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-var-lib-cni-networks-ovn-kubernetes\") pod \"e9a432c2-725d-46c6-963e-68a99ba35c89\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472381 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "e9a432c2-725d-46c6-963e-68a99ba35c89" (UID: "e9a432c2-725d-46c6-963e-68a99ba35c89"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472392 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-log-socket\") pod \"e9a432c2-725d-46c6-963e-68a99ba35c89\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472413 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-log-socket" (OuterVolumeSpecName: "log-socket") pod "e9a432c2-725d-46c6-963e-68a99ba35c89" (UID: "e9a432c2-725d-46c6-963e-68a99ba35c89"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472414 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9a432c2-725d-46c6-963e-68a99ba35c89-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "e9a432c2-725d-46c6-963e-68a99ba35c89" (UID: "e9a432c2-725d-46c6-963e-68a99ba35c89"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472442 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "e9a432c2-725d-46c6-963e-68a99ba35c89" (UID: "e9a432c2-725d-46c6-963e-68a99ba35c89"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472450 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbq6d\" (UniqueName: \"kubernetes.io/projected/e9a432c2-725d-46c6-963e-68a99ba35c89-kube-api-access-jbq6d\") pod \"e9a432c2-725d-46c6-963e-68a99ba35c89\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472455 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-slash" (OuterVolumeSpecName: "host-slash") pod "e9a432c2-725d-46c6-963e-68a99ba35c89" (UID: "e9a432c2-725d-46c6-963e-68a99ba35c89"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472483 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-var-lib-openvswitch\") pod \"e9a432c2-725d-46c6-963e-68a99ba35c89\" (UID: \"e9a432c2-725d-46c6-963e-68a99ba35c89\") " Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472614 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "e9a432c2-725d-46c6-963e-68a99ba35c89" (UID: "e9a432c2-725d-46c6-963e-68a99ba35c89"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472731 4840 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-slash\") on node \"crc\" DevicePath \"\"" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472736 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9a432c2-725d-46c6-963e-68a99ba35c89-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "e9a432c2-725d-46c6-963e-68a99ba35c89" (UID: "e9a432c2-725d-46c6-963e-68a99ba35c89"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472744 4840 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472764 4840 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472773 4840 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472783 4840 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472793 4840 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472801 4840 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-log-socket\") on node \"crc\" DevicePath \"\"" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472809 4840 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472818 4840 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472826 4840 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472827 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9a432c2-725d-46c6-963e-68a99ba35c89-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "e9a432c2-725d-46c6-963e-68a99ba35c89" (UID: "e9a432c2-725d-46c6-963e-68a99ba35c89"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472836 4840 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-node-log\") on node \"crc\" DevicePath \"\"" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472897 4840 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472911 4840 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472923 4840 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e9a432c2-725d-46c6-963e-68a99ba35c89-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.472934 4840 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.478037 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9a432c2-725d-46c6-963e-68a99ba35c89-kube-api-access-jbq6d" (OuterVolumeSpecName: "kube-api-access-jbq6d") pod "e9a432c2-725d-46c6-963e-68a99ba35c89" (UID: "e9a432c2-725d-46c6-963e-68a99ba35c89"). InnerVolumeSpecName "kube-api-access-jbq6d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.479702 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9a432c2-725d-46c6-963e-68a99ba35c89-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "e9a432c2-725d-46c6-963e-68a99ba35c89" (UID: "e9a432c2-725d-46c6-963e-68a99ba35c89"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.488047 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "e9a432c2-725d-46c6-963e-68a99ba35c89" (UID: "e9a432c2-725d-46c6-963e-68a99ba35c89"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.573921 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zggpv\" (UniqueName: \"kubernetes.io/projected/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-kube-api-access-zggpv\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.573974 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-ovn-node-metrics-cert\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.573993 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-log-socket\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.574012 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-host-cni-netd\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.574032 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-host-run-ovn-kubernetes\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.574061 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-host-slash\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.574091 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-run-systemd\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.574145 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-etc-openvswitch\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.574177 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-ovnkube-script-lib\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.574196 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-node-log\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.574216 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.574269 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-host-cni-bin\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.574287 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-var-lib-openvswitch\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.574305 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-ovnkube-config\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.574329 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-run-ovn\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.574381 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-systemd-units\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.574411 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-host-run-netns\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.574429 4840 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-host-kubelet\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7"
Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.574442 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-run-openvswitch\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7"
Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.574467 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-env-overrides\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7"
Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.574532 4840 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e9a432c2-725d-46c6-963e-68a99ba35c89-ovnkube-script-lib\") on node \"crc\" DevicePath \"\""
Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.574552 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbq6d\" (UniqueName: \"kubernetes.io/projected/e9a432c2-725d-46c6-963e-68a99ba35c89-kube-api-access-jbq6d\") on node \"crc\" DevicePath \"\""
Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.574562 4840 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e9a432c2-725d-46c6-963e-68a99ba35c89-run-systemd\") on node \"crc\" DevicePath \"\""
Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.574575 4840 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e9a432c2-725d-46c6-963e-68a99ba35c89-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\""
Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.574587 4840 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e9a432c2-725d-46c6-963e-68a99ba35c89-ovnkube-config\") on node \"crc\" DevicePath \"\""
Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.675858 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-host-cni-bin\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7"
Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.675963 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-var-lib-openvswitch\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7"
Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.676037 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-ovnkube-config\") pod
\"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.676077 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-host-cni-bin\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.676145 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-var-lib-openvswitch\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.676210 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-run-ovn\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.676288 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-systemd-units\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.676350 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-host-run-netns\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.676534 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-host-run-netns\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.676431 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-systemd-units\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.676625 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-run-ovn\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.676569 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-run-openvswitch\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc 
kubenswrapper[4840]: I1205 15:09:53.676734 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-run-openvswitch\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.676776 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-host-kubelet\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.676936 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-host-kubelet\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.676957 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-env-overrides\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.677042 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zggpv\" (UniqueName: \"kubernetes.io/projected/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-kube-api-access-zggpv\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.677102 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-ovn-node-metrics-cert\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.677162 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-log-socket\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.677219 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-host-cni-netd\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.677279 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-host-run-ovn-kubernetes\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.677337 4840 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-host-slash\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.677401 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-run-systemd\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.677470 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-etc-openvswitch\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.677526 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-ovnkube-config\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.677906 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-env-overrides\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.677535 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-ovnkube-script-lib\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.678022 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-node-log\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.678081 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.678226 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.678312 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-node-log\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.678386 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-host-run-ovn-kubernetes\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.678459 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-log-socket\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.678535 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-host-cni-netd\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.678600 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-run-systemd\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.678592 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-ovnkube-script-lib\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.678661 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-host-slash\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.678689 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-etc-openvswitch\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.683249 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-ovn-node-metrics-cert\") pod \"ovnkube-node-rrgp7\" (UID: \"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:53 crc kubenswrapper[4840]: I1205 15:09:53.707184 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zggpv\" (UniqueName: \"kubernetes.io/projected/b5e388ed-ee69-492f-9bcd-fc29d8160cc9-kube-api-access-zggpv\") pod \"ovnkube-node-rrgp7\" (UID: 
\"b5e388ed-ee69-492f-9bcd-fc29d8160cc9\") " pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:54 crc kubenswrapper[4840]: I1205 15:09:54.007025 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:09:54 crc kubenswrapper[4840]: W1205 15:09:54.033042 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb5e388ed_ee69_492f_9bcd_fc29d8160cc9.slice/crio-c503019fb39b5742de477d02dc16daa78cad08200b6f7a414b213bbad6982b2f WatchSource:0}: Error finding container c503019fb39b5742de477d02dc16daa78cad08200b6f7a414b213bbad6982b2f: Status 404 returned error can't find the container with id c503019fb39b5742de477d02dc16daa78cad08200b6f7a414b213bbad6982b2f Dec 05 15:09:54 crc kubenswrapper[4840]: I1205 15:09:54.263559 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-czvxk_e9a432c2-725d-46c6-963e-68a99ba35c89/ovn-acl-logging/0.log" Dec 05 15:09:54 crc kubenswrapper[4840]: I1205 15:09:54.264011 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-czvxk_e9a432c2-725d-46c6-963e-68a99ba35c89/ovn-controller/0.log" Dec 05 15:09:54 crc kubenswrapper[4840]: I1205 15:09:54.264396 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" event={"ID":"e9a432c2-725d-46c6-963e-68a99ba35c89","Type":"ContainerDied","Data":"35d579b74fcbf6e4655624c42ca06f576717bffd48f5e68405c791110e95dc85"} Dec 05 15:09:54 crc kubenswrapper[4840]: I1205 15:09:54.264448 4840 scope.go:117] "RemoveContainer" containerID="2144a37938b23a0d2c63e5ce547c2a3b5eb0cc5976c9125c6788c1bbed102368" Dec 05 15:09:54 crc kubenswrapper[4840]: I1205 15:09:54.264482 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-czvxk" Dec 05 15:09:54 crc kubenswrapper[4840]: I1205 15:09:54.265303 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" event={"ID":"b5e388ed-ee69-492f-9bcd-fc29d8160cc9","Type":"ContainerStarted","Data":"c503019fb39b5742de477d02dc16daa78cad08200b6f7a414b213bbad6982b2f"} Dec 05 15:09:54 crc kubenswrapper[4840]: I1205 15:09:54.267658 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-q8pn7_ffd91a64-4156-418d-8348-1efa3563e904/kube-multus/2.log" Dec 05 15:09:54 crc kubenswrapper[4840]: I1205 15:09:54.286955 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-czvxk"] Dec 05 15:09:54 crc kubenswrapper[4840]: I1205 15:09:54.290119 4840 scope.go:117] "RemoveContainer" containerID="4c5623a0dfd7ec1ee9f1c90aebc4e7d4d46d287f136e148343db3376a4fe2ec6" Dec 05 15:09:54 crc kubenswrapper[4840]: I1205 15:09:54.291479 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-czvxk"] Dec 05 15:09:54 crc kubenswrapper[4840]: I1205 15:09:54.302948 4840 scope.go:117] "RemoveContainer" containerID="6187e4a5d9269f954091cba3ee86d0467149c368bc84f91a81074ad1f1b04585" Dec 05 15:09:54 crc kubenswrapper[4840]: I1205 15:09:54.320849 4840 scope.go:117] "RemoveContainer" containerID="152a583ec9a1de6b978673d8884eaac6e88861f805f81bdb09f22c869ea3bfc5" Dec 05 15:09:54 crc kubenswrapper[4840]: I1205 15:09:54.334978 4840 scope.go:117] "RemoveContainer" containerID="4d22fda53427701477b62e794d00e6b3b62a8fb422efb54d825c3eb04ad69fa5" Dec 05 15:09:54 crc kubenswrapper[4840]: I1205 15:09:54.347161 4840 scope.go:117] "RemoveContainer" containerID="349e372c86bc95d65146748498aac1112fb0a1c05ebbb268c2187631ef4a7a48" Dec 05 15:09:54 crc kubenswrapper[4840]: I1205 15:09:54.361673 4840 scope.go:117] "RemoveContainer" containerID="ae08ec5b84ced94c24b919ef3e43ca5f7f6e4d2984304980440789f978b1c6a2" Dec 05 15:09:54 crc kubenswrapper[4840]: I1205 15:09:54.373136 4840 scope.go:117] "RemoveContainer" containerID="4aa10f8ac4244de8245b9c35d8db541d086e6db0f9f8bae9f0c6fba3f109287c" Dec 05 15:09:54 crc kubenswrapper[4840]: I1205 15:09:54.383665 4840 scope.go:117] "RemoveContainer" containerID="f6df68ba95796c7e6f98b1330d1ca03ec171e6dd34f673e0c6565b085c02efee" Dec 05 15:09:55 crc kubenswrapper[4840]: I1205 15:09:55.276119 4840 generic.go:334] "Generic (PLEG): container finished" podID="b5e388ed-ee69-492f-9bcd-fc29d8160cc9" containerID="8f44f95244d6bff4c72f24d08906cce79ebffdbd51955a6008bd5a4fb0dc9327" exitCode=0 Dec 05 15:09:55 crc kubenswrapper[4840]: I1205 15:09:55.276228 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" event={"ID":"b5e388ed-ee69-492f-9bcd-fc29d8160cc9","Type":"ContainerDied","Data":"8f44f95244d6bff4c72f24d08906cce79ebffdbd51955a6008bd5a4fb0dc9327"} Dec 05 15:09:56 crc kubenswrapper[4840]: I1205 15:09:56.073838 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9a432c2-725d-46c6-963e-68a99ba35c89" path="/var/lib/kubelet/pods/e9a432c2-725d-46c6-963e-68a99ba35c89/volumes" Dec 05 15:09:56 crc kubenswrapper[4840]: I1205 15:09:56.287755 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" event={"ID":"b5e388ed-ee69-492f-9bcd-fc29d8160cc9","Type":"ContainerStarted","Data":"28868594ff9145e7571a2718e7045052d4ee12c49c58c678331f97f02224e7c3"} Dec 05 15:09:56 crc 
kubenswrapper[4840]: I1205 15:09:56.287853 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" event={"ID":"b5e388ed-ee69-492f-9bcd-fc29d8160cc9","Type":"ContainerStarted","Data":"3c855700134db368eb91b57ffb1a824802e0960fb18df0fba5eb76f78239891e"} Dec 05 15:09:56 crc kubenswrapper[4840]: I1205 15:09:56.287909 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" event={"ID":"b5e388ed-ee69-492f-9bcd-fc29d8160cc9","Type":"ContainerStarted","Data":"b5033e66f4100b3f684daa03fad2ec0df72c7bdad9ecaec6143cd605293b8ad2"} Dec 05 15:09:56 crc kubenswrapper[4840]: I1205 15:09:56.287951 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" event={"ID":"b5e388ed-ee69-492f-9bcd-fc29d8160cc9","Type":"ContainerStarted","Data":"5ea424912974a4f20d9d5bc8bef9ea357cb4e2f6b17f94ab2a155459e7e512e3"} Dec 05 15:09:56 crc kubenswrapper[4840]: I1205 15:09:56.287967 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" event={"ID":"b5e388ed-ee69-492f-9bcd-fc29d8160cc9","Type":"ContainerStarted","Data":"0597efb0f4929f15678d1f93317e026d02f947e05a19275a58bc5ceaae7a565b"} Dec 05 15:09:56 crc kubenswrapper[4840]: I1205 15:09:56.287979 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" event={"ID":"b5e388ed-ee69-492f-9bcd-fc29d8160cc9","Type":"ContainerStarted","Data":"2e511a454edc28c29fa1440c13e69081f206a530686915ebdf3bd887dd260013"} Dec 05 15:09:58 crc kubenswrapper[4840]: I1205 15:09:58.302533 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" event={"ID":"b5e388ed-ee69-492f-9bcd-fc29d8160cc9","Type":"ContainerStarted","Data":"5052f23612a3f978e369c647a014c107bb9d144255d6a5d21d74b1e2475ef4db"} Dec 05 15:10:01 crc kubenswrapper[4840]: I1205 15:10:01.322172 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" event={"ID":"b5e388ed-ee69-492f-9bcd-fc29d8160cc9","Type":"ContainerStarted","Data":"b00b06f6bb542ffa536ba79fd4db719ca21872df5e0a1295f79279062da942df"} Dec 05 15:10:01 crc kubenswrapper[4840]: I1205 15:10:01.322793 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:10:01 crc kubenswrapper[4840]: I1205 15:10:01.359961 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:10:01 crc kubenswrapper[4840]: I1205 15:10:01.395402 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" podStartSLOduration=8.395378414 podStartE2EDuration="8.395378414s" podCreationTimestamp="2025-12-05 15:09:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:10:01.363486095 +0000 UTC m=+679.704548719" watchObservedRunningTime="2025-12-05 15:10:01.395378414 +0000 UTC m=+679.736441028" Dec 05 15:10:02 crc kubenswrapper[4840]: I1205 15:10:02.328484 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:10:02 crc kubenswrapper[4840]: I1205 15:10:02.328799 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:10:02 crc 
kubenswrapper[4840]: I1205 15:10:02.373400 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:10:07 crc kubenswrapper[4840]: I1205 15:10:07.067133 4840 scope.go:117] "RemoveContainer" containerID="83f6a1b848d66de940665d1fced2d72d3aad317913ba5ab4c94c1951388c49cb" Dec 05 15:10:07 crc kubenswrapper[4840]: E1205 15:10:07.067686 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-q8pn7_openshift-multus(ffd91a64-4156-418d-8348-1efa3563e904)\"" pod="openshift-multus/multus-q8pn7" podUID="ffd91a64-4156-418d-8348-1efa3563e904" Dec 05 15:10:19 crc kubenswrapper[4840]: I1205 15:10:19.066351 4840 scope.go:117] "RemoveContainer" containerID="83f6a1b848d66de940665d1fced2d72d3aad317913ba5ab4c94c1951388c49cb" Dec 05 15:10:19 crc kubenswrapper[4840]: I1205 15:10:19.436139 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-q8pn7_ffd91a64-4156-418d-8348-1efa3563e904/kube-multus/2.log" Dec 05 15:10:19 crc kubenswrapper[4840]: I1205 15:10:19.436476 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-q8pn7" event={"ID":"ffd91a64-4156-418d-8348-1efa3563e904","Type":"ContainerStarted","Data":"f0cd08ead33cb6d467aba886e0a1290327819dd09b1275c0bed05ce2793d296b"} Dec 05 15:10:24 crc kubenswrapper[4840]: I1205 15:10:24.029845 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rrgp7" Dec 05 15:10:31 crc kubenswrapper[4840]: I1205 15:10:31.804428 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd"] Dec 05 15:10:31 crc kubenswrapper[4840]: I1205 15:10:31.823734 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd" Dec 05 15:10:31 crc kubenswrapper[4840]: I1205 15:10:31.828956 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 15:10:31 crc kubenswrapper[4840]: I1205 15:10:31.839193 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd"] Dec 05 15:10:31 crc kubenswrapper[4840]: I1205 15:10:31.925272 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/131b3e58-67f5-4c2a-8d70-ff674420a7a5-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd\" (UID: \"131b3e58-67f5-4c2a-8d70-ff674420a7a5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd" Dec 05 15:10:31 crc kubenswrapper[4840]: I1205 15:10:31.925373 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jbgm\" (UniqueName: \"kubernetes.io/projected/131b3e58-67f5-4c2a-8d70-ff674420a7a5-kube-api-access-8jbgm\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd\" (UID: \"131b3e58-67f5-4c2a-8d70-ff674420a7a5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd" Dec 05 15:10:31 crc kubenswrapper[4840]: I1205 15:10:31.925540 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/131b3e58-67f5-4c2a-8d70-ff674420a7a5-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd\" (UID: \"131b3e58-67f5-4c2a-8d70-ff674420a7a5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd" Dec 05 15:10:32 crc kubenswrapper[4840]: I1205 15:10:32.026562 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/131b3e58-67f5-4c2a-8d70-ff674420a7a5-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd\" (UID: \"131b3e58-67f5-4c2a-8d70-ff674420a7a5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd" Dec 05 15:10:32 crc kubenswrapper[4840]: I1205 15:10:32.026602 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jbgm\" (UniqueName: \"kubernetes.io/projected/131b3e58-67f5-4c2a-8d70-ff674420a7a5-kube-api-access-8jbgm\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd\" (UID: \"131b3e58-67f5-4c2a-8d70-ff674420a7a5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd" Dec 05 15:10:32 crc kubenswrapper[4840]: I1205 15:10:32.026660 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/131b3e58-67f5-4c2a-8d70-ff674420a7a5-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd\" (UID: \"131b3e58-67f5-4c2a-8d70-ff674420a7a5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd" Dec 05 15:10:32 crc kubenswrapper[4840]: I1205 15:10:32.027096 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/131b3e58-67f5-4c2a-8d70-ff674420a7a5-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd\" (UID: \"131b3e58-67f5-4c2a-8d70-ff674420a7a5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd" Dec 05 15:10:32 crc kubenswrapper[4840]: I1205 15:10:32.027825 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/131b3e58-67f5-4c2a-8d70-ff674420a7a5-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd\" (UID: \"131b3e58-67f5-4c2a-8d70-ff674420a7a5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd" Dec 05 15:10:32 crc kubenswrapper[4840]: I1205 15:10:32.050259 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jbgm\" (UniqueName: \"kubernetes.io/projected/131b3e58-67f5-4c2a-8d70-ff674420a7a5-kube-api-access-8jbgm\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd\" (UID: \"131b3e58-67f5-4c2a-8d70-ff674420a7a5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd" Dec 05 15:10:32 crc kubenswrapper[4840]: I1205 15:10:32.141667 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd" Dec 05 15:10:32 crc kubenswrapper[4840]: I1205 15:10:32.312934 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd"] Dec 05 15:10:32 crc kubenswrapper[4840]: I1205 15:10:32.503738 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd" event={"ID":"131b3e58-67f5-4c2a-8d70-ff674420a7a5","Type":"ContainerStarted","Data":"cacd321f4166d19a83ff1edda0bf68364c64d7fb3183a33b99bf8f00cbeb1893"} Dec 05 15:10:33 crc kubenswrapper[4840]: I1205 15:10:33.510852 4840 generic.go:334] "Generic (PLEG): container finished" podID="131b3e58-67f5-4c2a-8d70-ff674420a7a5" containerID="c54b0215495a9c469fc0c5e624a1b30f5615c6ba7e16e40e1496fe1a2e61072e" exitCode=0 Dec 05 15:10:33 crc kubenswrapper[4840]: I1205 15:10:33.510953 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd" event={"ID":"131b3e58-67f5-4c2a-8d70-ff674420a7a5","Type":"ContainerDied","Data":"c54b0215495a9c469fc0c5e624a1b30f5615c6ba7e16e40e1496fe1a2e61072e"} Dec 05 15:10:35 crc kubenswrapper[4840]: I1205 15:10:35.523840 4840 generic.go:334] "Generic (PLEG): container finished" podID="131b3e58-67f5-4c2a-8d70-ff674420a7a5" containerID="cda6c7669c2de07b47f2e0cf36145d97f9ed9aadc2298a70271113b3edafe60f" exitCode=0 Dec 05 15:10:35 crc kubenswrapper[4840]: I1205 15:10:35.524099 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd" event={"ID":"131b3e58-67f5-4c2a-8d70-ff674420a7a5","Type":"ContainerDied","Data":"cda6c7669c2de07b47f2e0cf36145d97f9ed9aadc2298a70271113b3edafe60f"} Dec 05 15:10:36 crc kubenswrapper[4840]: I1205 15:10:36.533009 4840 generic.go:334] "Generic (PLEG): container finished" podID="131b3e58-67f5-4c2a-8d70-ff674420a7a5" containerID="67219596f1fdbcba7c2fe17e9b54558ba4c69c8cdc5f39ad94512024adfcaaee" exitCode=0 Dec 05 15:10:36 crc kubenswrapper[4840]: I1205 
15:10:36.533079 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd" event={"ID":"131b3e58-67f5-4c2a-8d70-ff674420a7a5","Type":"ContainerDied","Data":"67219596f1fdbcba7c2fe17e9b54558ba4c69c8cdc5f39ad94512024adfcaaee"} Dec 05 15:10:37 crc kubenswrapper[4840]: I1205 15:10:37.834190 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd" Dec 05 15:10:38 crc kubenswrapper[4840]: I1205 15:10:38.009855 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jbgm\" (UniqueName: \"kubernetes.io/projected/131b3e58-67f5-4c2a-8d70-ff674420a7a5-kube-api-access-8jbgm\") pod \"131b3e58-67f5-4c2a-8d70-ff674420a7a5\" (UID: \"131b3e58-67f5-4c2a-8d70-ff674420a7a5\") " Dec 05 15:10:38 crc kubenswrapper[4840]: I1205 15:10:38.010023 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/131b3e58-67f5-4c2a-8d70-ff674420a7a5-bundle\") pod \"131b3e58-67f5-4c2a-8d70-ff674420a7a5\" (UID: \"131b3e58-67f5-4c2a-8d70-ff674420a7a5\") " Dec 05 15:10:38 crc kubenswrapper[4840]: I1205 15:10:38.010066 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/131b3e58-67f5-4c2a-8d70-ff674420a7a5-util\") pod \"131b3e58-67f5-4c2a-8d70-ff674420a7a5\" (UID: \"131b3e58-67f5-4c2a-8d70-ff674420a7a5\") " Dec 05 15:10:38 crc kubenswrapper[4840]: I1205 15:10:38.011304 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/131b3e58-67f5-4c2a-8d70-ff674420a7a5-bundle" (OuterVolumeSpecName: "bundle") pod "131b3e58-67f5-4c2a-8d70-ff674420a7a5" (UID: "131b3e58-67f5-4c2a-8d70-ff674420a7a5"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:10:38 crc kubenswrapper[4840]: I1205 15:10:38.018681 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/131b3e58-67f5-4c2a-8d70-ff674420a7a5-kube-api-access-8jbgm" (OuterVolumeSpecName: "kube-api-access-8jbgm") pod "131b3e58-67f5-4c2a-8d70-ff674420a7a5" (UID: "131b3e58-67f5-4c2a-8d70-ff674420a7a5"). InnerVolumeSpecName "kube-api-access-8jbgm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:10:38 crc kubenswrapper[4840]: I1205 15:10:38.043274 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/131b3e58-67f5-4c2a-8d70-ff674420a7a5-util" (OuterVolumeSpecName: "util") pod "131b3e58-67f5-4c2a-8d70-ff674420a7a5" (UID: "131b3e58-67f5-4c2a-8d70-ff674420a7a5"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:10:38 crc kubenswrapper[4840]: I1205 15:10:38.112173 4840 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/131b3e58-67f5-4c2a-8d70-ff674420a7a5-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:10:38 crc kubenswrapper[4840]: I1205 15:10:38.112305 4840 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/131b3e58-67f5-4c2a-8d70-ff674420a7a5-util\") on node \"crc\" DevicePath \"\"" Dec 05 15:10:38 crc kubenswrapper[4840]: I1205 15:10:38.112396 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jbgm\" (UniqueName: \"kubernetes.io/projected/131b3e58-67f5-4c2a-8d70-ff674420a7a5-kube-api-access-8jbgm\") on node \"crc\" DevicePath \"\"" Dec 05 15:10:38 crc kubenswrapper[4840]: I1205 15:10:38.551534 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd" event={"ID":"131b3e58-67f5-4c2a-8d70-ff674420a7a5","Type":"ContainerDied","Data":"cacd321f4166d19a83ff1edda0bf68364c64d7fb3183a33b99bf8f00cbeb1893"} Dec 05 15:10:38 crc kubenswrapper[4840]: I1205 15:10:38.551596 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cacd321f4166d19a83ff1edda0bf68364c64d7fb3183a33b99bf8f00cbeb1893" Dec 05 15:10:38 crc kubenswrapper[4840]: I1205 15:10:38.551674 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd" Dec 05 15:10:40 crc kubenswrapper[4840]: I1205 15:10:40.838906 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-8zwbs"] Dec 05 15:10:40 crc kubenswrapper[4840]: E1205 15:10:40.840235 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="131b3e58-67f5-4c2a-8d70-ff674420a7a5" containerName="extract" Dec 05 15:10:40 crc kubenswrapper[4840]: I1205 15:10:40.840255 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="131b3e58-67f5-4c2a-8d70-ff674420a7a5" containerName="extract" Dec 05 15:10:40 crc kubenswrapper[4840]: E1205 15:10:40.840275 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="131b3e58-67f5-4c2a-8d70-ff674420a7a5" containerName="util" Dec 05 15:10:40 crc kubenswrapper[4840]: I1205 15:10:40.840281 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="131b3e58-67f5-4c2a-8d70-ff674420a7a5" containerName="util" Dec 05 15:10:40 crc kubenswrapper[4840]: E1205 15:10:40.840293 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="131b3e58-67f5-4c2a-8d70-ff674420a7a5" containerName="pull" Dec 05 15:10:40 crc kubenswrapper[4840]: I1205 15:10:40.840299 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="131b3e58-67f5-4c2a-8d70-ff674420a7a5" containerName="pull" Dec 05 15:10:40 crc kubenswrapper[4840]: I1205 15:10:40.840498 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="131b3e58-67f5-4c2a-8d70-ff674420a7a5" containerName="extract" Dec 05 15:10:40 crc kubenswrapper[4840]: I1205 15:10:40.841410 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-8zwbs" Dec 05 15:10:40 crc kubenswrapper[4840]: I1205 15:10:40.843706 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-vmhw7" Dec 05 15:10:40 crc kubenswrapper[4840]: I1205 15:10:40.845248 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 05 15:10:40 crc kubenswrapper[4840]: I1205 15:10:40.845244 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 05 15:10:40 crc kubenswrapper[4840]: I1205 15:10:40.853215 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-8zwbs"] Dec 05 15:10:40 crc kubenswrapper[4840]: I1205 15:10:40.949926 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wsvh\" (UniqueName: \"kubernetes.io/projected/26f97dff-d2b1-4d3d-b68a-2a8851ea6999-kube-api-access-4wsvh\") pod \"nmstate-operator-5b5b58f5c8-8zwbs\" (UID: \"26f97dff-d2b1-4d3d-b68a-2a8851ea6999\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-8zwbs" Dec 05 15:10:41 crc kubenswrapper[4840]: I1205 15:10:41.050965 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wsvh\" (UniqueName: \"kubernetes.io/projected/26f97dff-d2b1-4d3d-b68a-2a8851ea6999-kube-api-access-4wsvh\") pod \"nmstate-operator-5b5b58f5c8-8zwbs\" (UID: \"26f97dff-d2b1-4d3d-b68a-2a8851ea6999\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-8zwbs" Dec 05 15:10:41 crc kubenswrapper[4840]: I1205 15:10:41.068585 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wsvh\" (UniqueName: \"kubernetes.io/projected/26f97dff-d2b1-4d3d-b68a-2a8851ea6999-kube-api-access-4wsvh\") pod \"nmstate-operator-5b5b58f5c8-8zwbs\" (UID: \"26f97dff-d2b1-4d3d-b68a-2a8851ea6999\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-8zwbs" Dec 05 15:10:41 crc kubenswrapper[4840]: I1205 15:10:41.157548 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-8zwbs" Dec 05 15:10:41 crc kubenswrapper[4840]: I1205 15:10:41.600112 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-8zwbs"] Dec 05 15:10:41 crc kubenswrapper[4840]: W1205 15:10:41.604033 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod26f97dff_d2b1_4d3d_b68a_2a8851ea6999.slice/crio-ee9196ded5c496268e353915ab44d79dcba9b2632403f91207fada7f2a8ef2cb WatchSource:0}: Error finding container ee9196ded5c496268e353915ab44d79dcba9b2632403f91207fada7f2a8ef2cb: Status 404 returned error can't find the container with id ee9196ded5c496268e353915ab44d79dcba9b2632403f91207fada7f2a8ef2cb Dec 05 15:10:42 crc kubenswrapper[4840]: I1205 15:10:42.575326 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-8zwbs" event={"ID":"26f97dff-d2b1-4d3d-b68a-2a8851ea6999","Type":"ContainerStarted","Data":"ee9196ded5c496268e353915ab44d79dcba9b2632403f91207fada7f2a8ef2cb"} Dec 05 15:10:44 crc kubenswrapper[4840]: I1205 15:10:44.585730 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-8zwbs" event={"ID":"26f97dff-d2b1-4d3d-b68a-2a8851ea6999","Type":"ContainerStarted","Data":"d8a979633a8b7e41e62cbdb5f80a8b14236bb0b07adc44d563c037ece9bb5adb"} Dec 05 15:10:49 crc kubenswrapper[4840]: I1205 15:10:49.471789 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:10:49 crc kubenswrapper[4840]: I1205 15:10:49.472216 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:10:51 crc kubenswrapper[4840]: I1205 15:10:51.892410 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-8zwbs" podStartSLOduration=9.751058968 podStartE2EDuration="11.892387419s" podCreationTimestamp="2025-12-05 15:10:40 +0000 UTC" firstStartedPulling="2025-12-05 15:10:41.606156868 +0000 UTC m=+719.947219492" lastFinishedPulling="2025-12-05 15:10:43.747485329 +0000 UTC m=+722.088547943" observedRunningTime="2025-12-05 15:10:44.605193969 +0000 UTC m=+722.946256593" watchObservedRunningTime="2025-12-05 15:10:51.892387419 +0000 UTC m=+730.233450033" Dec 05 15:10:51 crc kubenswrapper[4840]: I1205 15:10:51.894602 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-rldh8"] Dec 05 15:10:51 crc kubenswrapper[4840]: I1205 15:10:51.895416 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-rldh8" Dec 05 15:10:51 crc kubenswrapper[4840]: I1205 15:10:51.897748 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-2nkt7" Dec 05 15:10:51 crc kubenswrapper[4840]: I1205 15:10:51.904935 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-rldh8"] Dec 05 15:10:51 crc kubenswrapper[4840]: I1205 15:10:51.912146 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2htrv"] Dec 05 15:10:51 crc kubenswrapper[4840]: I1205 15:10:51.912901 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2htrv" Dec 05 15:10:51 crc kubenswrapper[4840]: I1205 15:10:51.914713 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 05 15:10:51 crc kubenswrapper[4840]: I1205 15:10:51.941959 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2htrv"] Dec 05 15:10:51 crc kubenswrapper[4840]: I1205 15:10:51.947522 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-pcnxx"] Dec 05 15:10:51 crc kubenswrapper[4840]: I1205 15:10:51.955400 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-pcnxx" Dec 05 15:10:51 crc kubenswrapper[4840]: I1205 15:10:51.990405 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9454\" (UniqueName: \"kubernetes.io/projected/1c6815a9-6a06-4ed1-81fb-fe876b3ff5db-kube-api-access-g9454\") pod \"nmstate-metrics-7f946cbc9-rldh8\" (UID: \"1c6815a9-6a06-4ed1-81fb-fe876b3ff5db\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-rldh8" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.039221 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-nhx2t"] Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.040197 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-nhx2t" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.041843 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.042237 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-78r7s" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.042595 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.048846 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-nhx2t"] Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.091419 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9454\" (UniqueName: \"kubernetes.io/projected/1c6815a9-6a06-4ed1-81fb-fe876b3ff5db-kube-api-access-g9454\") pod \"nmstate-metrics-7f946cbc9-rldh8\" (UID: \"1c6815a9-6a06-4ed1-81fb-fe876b3ff5db\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-rldh8" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.091464 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcsz4\" (UniqueName: \"kubernetes.io/projected/8b83469a-e169-45bb-b5b7-ef32a36719f2-kube-api-access-rcsz4\") pod \"nmstate-handler-pcnxx\" (UID: \"8b83469a-e169-45bb-b5b7-ef32a36719f2\") " pod="openshift-nmstate/nmstate-handler-pcnxx" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.091496 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvnd8\" (UniqueName: \"kubernetes.io/projected/8ed0ecac-e512-440f-87ad-14e23ea9945f-kube-api-access-lvnd8\") pod \"nmstate-webhook-5f6d4c5ccb-2htrv\" (UID: \"8ed0ecac-e512-440f-87ad-14e23ea9945f\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2htrv" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.091518 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/8b83469a-e169-45bb-b5b7-ef32a36719f2-ovs-socket\") pod \"nmstate-handler-pcnxx\" (UID: \"8b83469a-e169-45bb-b5b7-ef32a36719f2\") " pod="openshift-nmstate/nmstate-handler-pcnxx" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.091616 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/8b83469a-e169-45bb-b5b7-ef32a36719f2-nmstate-lock\") pod \"nmstate-handler-pcnxx\" (UID: \"8b83469a-e169-45bb-b5b7-ef32a36719f2\") " pod="openshift-nmstate/nmstate-handler-pcnxx" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.091737 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/8ed0ecac-e512-440f-87ad-14e23ea9945f-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-2htrv\" (UID: \"8ed0ecac-e512-440f-87ad-14e23ea9945f\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2htrv" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.091831 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/8b83469a-e169-45bb-b5b7-ef32a36719f2-dbus-socket\") pod 
\"nmstate-handler-pcnxx\" (UID: \"8b83469a-e169-45bb-b5b7-ef32a36719f2\") " pod="openshift-nmstate/nmstate-handler-pcnxx" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.108695 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9454\" (UniqueName: \"kubernetes.io/projected/1c6815a9-6a06-4ed1-81fb-fe876b3ff5db-kube-api-access-g9454\") pod \"nmstate-metrics-7f946cbc9-rldh8\" (UID: \"1c6815a9-6a06-4ed1-81fb-fe876b3ff5db\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-rldh8" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.193580 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/8b83469a-e169-45bb-b5b7-ef32a36719f2-dbus-socket\") pod \"nmstate-handler-pcnxx\" (UID: \"8b83469a-e169-45bb-b5b7-ef32a36719f2\") " pod="openshift-nmstate/nmstate-handler-pcnxx" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.193649 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/b15f2765-949a-4e53-a48b-1e691b8b1b37-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-nhx2t\" (UID: \"b15f2765-949a-4e53-a48b-1e691b8b1b37\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-nhx2t" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.193700 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzzm5\" (UniqueName: \"kubernetes.io/projected/b15f2765-949a-4e53-a48b-1e691b8b1b37-kube-api-access-jzzm5\") pod \"nmstate-console-plugin-7fbb5f6569-nhx2t\" (UID: \"b15f2765-949a-4e53-a48b-1e691b8b1b37\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-nhx2t" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.193745 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/b15f2765-949a-4e53-a48b-1e691b8b1b37-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-nhx2t\" (UID: \"b15f2765-949a-4e53-a48b-1e691b8b1b37\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-nhx2t" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.193801 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcsz4\" (UniqueName: \"kubernetes.io/projected/8b83469a-e169-45bb-b5b7-ef32a36719f2-kube-api-access-rcsz4\") pod \"nmstate-handler-pcnxx\" (UID: \"8b83469a-e169-45bb-b5b7-ef32a36719f2\") " pod="openshift-nmstate/nmstate-handler-pcnxx" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.193860 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvnd8\" (UniqueName: \"kubernetes.io/projected/8ed0ecac-e512-440f-87ad-14e23ea9945f-kube-api-access-lvnd8\") pod \"nmstate-webhook-5f6d4c5ccb-2htrv\" (UID: \"8ed0ecac-e512-440f-87ad-14e23ea9945f\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2htrv" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.193931 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/8b83469a-e169-45bb-b5b7-ef32a36719f2-ovs-socket\") pod \"nmstate-handler-pcnxx\" (UID: \"8b83469a-e169-45bb-b5b7-ef32a36719f2\") " pod="openshift-nmstate/nmstate-handler-pcnxx" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.193967 4840 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/8b83469a-e169-45bb-b5b7-ef32a36719f2-nmstate-lock\") pod \"nmstate-handler-pcnxx\" (UID: \"8b83469a-e169-45bb-b5b7-ef32a36719f2\") " pod="openshift-nmstate/nmstate-handler-pcnxx" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.194011 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/8ed0ecac-e512-440f-87ad-14e23ea9945f-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-2htrv\" (UID: \"8ed0ecac-e512-440f-87ad-14e23ea9945f\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2htrv" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.195247 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/8b83469a-e169-45bb-b5b7-ef32a36719f2-nmstate-lock\") pod \"nmstate-handler-pcnxx\" (UID: \"8b83469a-e169-45bb-b5b7-ef32a36719f2\") " pod="openshift-nmstate/nmstate-handler-pcnxx" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.195263 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/8b83469a-e169-45bb-b5b7-ef32a36719f2-dbus-socket\") pod \"nmstate-handler-pcnxx\" (UID: \"8b83469a-e169-45bb-b5b7-ef32a36719f2\") " pod="openshift-nmstate/nmstate-handler-pcnxx" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.195270 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/8b83469a-e169-45bb-b5b7-ef32a36719f2-ovs-socket\") pod \"nmstate-handler-pcnxx\" (UID: \"8b83469a-e169-45bb-b5b7-ef32a36719f2\") " pod="openshift-nmstate/nmstate-handler-pcnxx" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.210523 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/8ed0ecac-e512-440f-87ad-14e23ea9945f-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-2htrv\" (UID: \"8ed0ecac-e512-440f-87ad-14e23ea9945f\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2htrv" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.213296 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-rldh8" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.221676 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-7dcd7df9db-98f2j"] Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.222307 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.235462 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7dcd7df9db-98f2j"] Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.240502 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcsz4\" (UniqueName: \"kubernetes.io/projected/8b83469a-e169-45bb-b5b7-ef32a36719f2-kube-api-access-rcsz4\") pod \"nmstate-handler-pcnxx\" (UID: \"8b83469a-e169-45bb-b5b7-ef32a36719f2\") " pod="openshift-nmstate/nmstate-handler-pcnxx" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.249642 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvnd8\" (UniqueName: \"kubernetes.io/projected/8ed0ecac-e512-440f-87ad-14e23ea9945f-kube-api-access-lvnd8\") pod \"nmstate-webhook-5f6d4c5ccb-2htrv\" (UID: \"8ed0ecac-e512-440f-87ad-14e23ea9945f\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2htrv" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.273422 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-pcnxx" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.294762 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/b15f2765-949a-4e53-a48b-1e691b8b1b37-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-nhx2t\" (UID: \"b15f2765-949a-4e53-a48b-1e691b8b1b37\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-nhx2t" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.294800 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzzm5\" (UniqueName: \"kubernetes.io/projected/b15f2765-949a-4e53-a48b-1e691b8b1b37-kube-api-access-jzzm5\") pod \"nmstate-console-plugin-7fbb5f6569-nhx2t\" (UID: \"b15f2765-949a-4e53-a48b-1e691b8b1b37\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-nhx2t" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.294823 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/b15f2765-949a-4e53-a48b-1e691b8b1b37-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-nhx2t\" (UID: \"b15f2765-949a-4e53-a48b-1e691b8b1b37\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-nhx2t" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.295498 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/b15f2765-949a-4e53-a48b-1e691b8b1b37-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-nhx2t\" (UID: \"b15f2765-949a-4e53-a48b-1e691b8b1b37\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-nhx2t" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.298720 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/b15f2765-949a-4e53-a48b-1e691b8b1b37-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-nhx2t\" (UID: \"b15f2765-949a-4e53-a48b-1e691b8b1b37\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-nhx2t" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.314925 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzzm5\" (UniqueName: 
\"kubernetes.io/projected/b15f2765-949a-4e53-a48b-1e691b8b1b37-kube-api-access-jzzm5\") pod \"nmstate-console-plugin-7fbb5f6569-nhx2t\" (UID: \"b15f2765-949a-4e53-a48b-1e691b8b1b37\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-nhx2t" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.355269 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-nhx2t" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.395419 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a5bd3510-f1d3-47f4-8052-8f3b066ee16b-console-serving-cert\") pod \"console-7dcd7df9db-98f2j\" (UID: \"a5bd3510-f1d3-47f4-8052-8f3b066ee16b\") " pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.395475 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a5bd3510-f1d3-47f4-8052-8f3b066ee16b-trusted-ca-bundle\") pod \"console-7dcd7df9db-98f2j\" (UID: \"a5bd3510-f1d3-47f4-8052-8f3b066ee16b\") " pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.395525 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a5bd3510-f1d3-47f4-8052-8f3b066ee16b-console-oauth-config\") pod \"console-7dcd7df9db-98f2j\" (UID: \"a5bd3510-f1d3-47f4-8052-8f3b066ee16b\") " pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.395552 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a5bd3510-f1d3-47f4-8052-8f3b066ee16b-oauth-serving-cert\") pod \"console-7dcd7df9db-98f2j\" (UID: \"a5bd3510-f1d3-47f4-8052-8f3b066ee16b\") " pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.395581 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a5bd3510-f1d3-47f4-8052-8f3b066ee16b-console-config\") pod \"console-7dcd7df9db-98f2j\" (UID: \"a5bd3510-f1d3-47f4-8052-8f3b066ee16b\") " pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.395616 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a5bd3510-f1d3-47f4-8052-8f3b066ee16b-service-ca\") pod \"console-7dcd7df9db-98f2j\" (UID: \"a5bd3510-f1d3-47f4-8052-8f3b066ee16b\") " pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.395650 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79s88\" (UniqueName: \"kubernetes.io/projected/a5bd3510-f1d3-47f4-8052-8f3b066ee16b-kube-api-access-79s88\") pod \"console-7dcd7df9db-98f2j\" (UID: \"a5bd3510-f1d3-47f4-8052-8f3b066ee16b\") " pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.408374 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-rldh8"] Dec 05 15:10:52 crc 
kubenswrapper[4840]: W1205 15:10:52.419825 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1c6815a9_6a06_4ed1_81fb_fe876b3ff5db.slice/crio-2cff89265f95bfeba0580a8e7b196585ae1a71256c8bdb0da5da33ad956b2fd1 WatchSource:0}: Error finding container 2cff89265f95bfeba0580a8e7b196585ae1a71256c8bdb0da5da33ad956b2fd1: Status 404 returned error can't find the container with id 2cff89265f95bfeba0580a8e7b196585ae1a71256c8bdb0da5da33ad956b2fd1 Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.504506 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79s88\" (UniqueName: \"kubernetes.io/projected/a5bd3510-f1d3-47f4-8052-8f3b066ee16b-kube-api-access-79s88\") pod \"console-7dcd7df9db-98f2j\" (UID: \"a5bd3510-f1d3-47f4-8052-8f3b066ee16b\") " pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.504560 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a5bd3510-f1d3-47f4-8052-8f3b066ee16b-console-serving-cert\") pod \"console-7dcd7df9db-98f2j\" (UID: \"a5bd3510-f1d3-47f4-8052-8f3b066ee16b\") " pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.504593 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a5bd3510-f1d3-47f4-8052-8f3b066ee16b-trusted-ca-bundle\") pod \"console-7dcd7df9db-98f2j\" (UID: \"a5bd3510-f1d3-47f4-8052-8f3b066ee16b\") " pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.504638 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a5bd3510-f1d3-47f4-8052-8f3b066ee16b-console-oauth-config\") pod \"console-7dcd7df9db-98f2j\" (UID: \"a5bd3510-f1d3-47f4-8052-8f3b066ee16b\") " pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.504660 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a5bd3510-f1d3-47f4-8052-8f3b066ee16b-oauth-serving-cert\") pod \"console-7dcd7df9db-98f2j\" (UID: \"a5bd3510-f1d3-47f4-8052-8f3b066ee16b\") " pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.504688 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a5bd3510-f1d3-47f4-8052-8f3b066ee16b-console-config\") pod \"console-7dcd7df9db-98f2j\" (UID: \"a5bd3510-f1d3-47f4-8052-8f3b066ee16b\") " pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.504718 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a5bd3510-f1d3-47f4-8052-8f3b066ee16b-service-ca\") pod \"console-7dcd7df9db-98f2j\" (UID: \"a5bd3510-f1d3-47f4-8052-8f3b066ee16b\") " pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.505641 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a5bd3510-f1d3-47f4-8052-8f3b066ee16b-service-ca\") pod \"console-7dcd7df9db-98f2j\" 
(UID: \"a5bd3510-f1d3-47f4-8052-8f3b066ee16b\") " pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.506429 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a5bd3510-f1d3-47f4-8052-8f3b066ee16b-trusted-ca-bundle\") pod \"console-7dcd7df9db-98f2j\" (UID: \"a5bd3510-f1d3-47f4-8052-8f3b066ee16b\") " pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.506935 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a5bd3510-f1d3-47f4-8052-8f3b066ee16b-oauth-serving-cert\") pod \"console-7dcd7df9db-98f2j\" (UID: \"a5bd3510-f1d3-47f4-8052-8f3b066ee16b\") " pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.507569 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a5bd3510-f1d3-47f4-8052-8f3b066ee16b-console-config\") pod \"console-7dcd7df9db-98f2j\" (UID: \"a5bd3510-f1d3-47f4-8052-8f3b066ee16b\") " pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.513406 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a5bd3510-f1d3-47f4-8052-8f3b066ee16b-console-serving-cert\") pod \"console-7dcd7df9db-98f2j\" (UID: \"a5bd3510-f1d3-47f4-8052-8f3b066ee16b\") " pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.514646 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a5bd3510-f1d3-47f4-8052-8f3b066ee16b-console-oauth-config\") pod \"console-7dcd7df9db-98f2j\" (UID: \"a5bd3510-f1d3-47f4-8052-8f3b066ee16b\") " pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.534782 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2htrv" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.535699 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79s88\" (UniqueName: \"kubernetes.io/projected/a5bd3510-f1d3-47f4-8052-8f3b066ee16b-kube-api-access-79s88\") pod \"console-7dcd7df9db-98f2j\" (UID: \"a5bd3510-f1d3-47f4-8052-8f3b066ee16b\") " pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.545022 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-nhx2t"] Dec 05 15:10:52 crc kubenswrapper[4840]: W1205 15:10:52.551815 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb15f2765_949a_4e53_a48b_1e691b8b1b37.slice/crio-b6be824a035f3d008e6daf2fe6459113e79d3640d87a2f1d8ddd5ec6fafecdc9 WatchSource:0}: Error finding container b6be824a035f3d008e6daf2fe6459113e79d3640d87a2f1d8ddd5ec6fafecdc9: Status 404 returned error can't find the container with id b6be824a035f3d008e6daf2fe6459113e79d3640d87a2f1d8ddd5ec6fafecdc9 Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.589924 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.643635 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-nhx2t" event={"ID":"b15f2765-949a-4e53-a48b-1e691b8b1b37","Type":"ContainerStarted","Data":"b6be824a035f3d008e6daf2fe6459113e79d3640d87a2f1d8ddd5ec6fafecdc9"} Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.644725 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-pcnxx" event={"ID":"8b83469a-e169-45bb-b5b7-ef32a36719f2","Type":"ContainerStarted","Data":"09f809592ce8edadb8f5fa0e29f04a532ab6793846a2a7e2741c88ca6c7962b8"} Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.645500 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-rldh8" event={"ID":"1c6815a9-6a06-4ed1-81fb-fe876b3ff5db","Type":"ContainerStarted","Data":"2cff89265f95bfeba0580a8e7b196585ae1a71256c8bdb0da5da33ad956b2fd1"} Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.789363 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7dcd7df9db-98f2j"] Dec 05 15:10:52 crc kubenswrapper[4840]: W1205 15:10:52.795823 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda5bd3510_f1d3_47f4_8052_8f3b066ee16b.slice/crio-13eb1d3c10997f395fe519fd6dc410b267e6ce284e9bfe5a7f014bbb9474516e WatchSource:0}: Error finding container 13eb1d3c10997f395fe519fd6dc410b267e6ce284e9bfe5a7f014bbb9474516e: Status 404 returned error can't find the container with id 13eb1d3c10997f395fe519fd6dc410b267e6ce284e9bfe5a7f014bbb9474516e Dec 05 15:10:52 crc kubenswrapper[4840]: I1205 15:10:52.991112 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2htrv"] Dec 05 15:10:52 crc kubenswrapper[4840]: W1205 15:10:52.997543 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8ed0ecac_e512_440f_87ad_14e23ea9945f.slice/crio-d4752be937b8d40c9613a3c18d786beedfbf7a0f111c31ea015ef492dc4060db WatchSource:0}: Error finding container d4752be937b8d40c9613a3c18d786beedfbf7a0f111c31ea015ef492dc4060db: Status 404 returned error can't find the container with id d4752be937b8d40c9613a3c18d786beedfbf7a0f111c31ea015ef492dc4060db Dec 05 15:10:53 crc kubenswrapper[4840]: I1205 15:10:53.654463 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7dcd7df9db-98f2j" event={"ID":"a5bd3510-f1d3-47f4-8052-8f3b066ee16b","Type":"ContainerStarted","Data":"6fb6fb8e5e1585f9273c38223327eefab4646b38ba8c2c3111bdf5212dc01aff"} Dec 05 15:10:53 crc kubenswrapper[4840]: I1205 15:10:53.654511 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7dcd7df9db-98f2j" event={"ID":"a5bd3510-f1d3-47f4-8052-8f3b066ee16b","Type":"ContainerStarted","Data":"13eb1d3c10997f395fe519fd6dc410b267e6ce284e9bfe5a7f014bbb9474516e"} Dec 05 15:10:53 crc kubenswrapper[4840]: I1205 15:10:53.656244 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2htrv" event={"ID":"8ed0ecac-e512-440f-87ad-14e23ea9945f","Type":"ContainerStarted","Data":"d4752be937b8d40c9613a3c18d786beedfbf7a0f111c31ea015ef492dc4060db"} Dec 05 15:10:53 crc kubenswrapper[4840]: I1205 15:10:53.674961 4840 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openshift-console/console-7dcd7df9db-98f2j" podStartSLOduration=1.674938213 podStartE2EDuration="1.674938213s" podCreationTimestamp="2025-12-05 15:10:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:10:53.672292727 +0000 UTC m=+732.013355341" watchObservedRunningTime="2025-12-05 15:10:53.674938213 +0000 UTC m=+732.016000827" Dec 05 15:10:56 crc kubenswrapper[4840]: I1205 15:10:56.672493 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2htrv" event={"ID":"8ed0ecac-e512-440f-87ad-14e23ea9945f","Type":"ContainerStarted","Data":"6b295dbad8f174995e95583c5db53d59c4ee01f637bf7ab139d59b66a8c1b412"} Dec 05 15:10:56 crc kubenswrapper[4840]: I1205 15:10:56.672934 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2htrv" Dec 05 15:10:56 crc kubenswrapper[4840]: I1205 15:10:56.674296 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-nhx2t" event={"ID":"b15f2765-949a-4e53-a48b-1e691b8b1b37","Type":"ContainerStarted","Data":"54b1684e7af1090c781c6ed8523628f4b417b27082a40aa0dedb7d3761ea90a9"} Dec 05 15:10:56 crc kubenswrapper[4840]: I1205 15:10:56.676037 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-pcnxx" event={"ID":"8b83469a-e169-45bb-b5b7-ef32a36719f2","Type":"ContainerStarted","Data":"13cc2265f5049aedee37bb1885f0caf62a02eb065e2660553b26874191a29174"} Dec 05 15:10:56 crc kubenswrapper[4840]: I1205 15:10:56.676610 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-pcnxx" Dec 05 15:10:56 crc kubenswrapper[4840]: I1205 15:10:56.678608 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-rldh8" event={"ID":"1c6815a9-6a06-4ed1-81fb-fe876b3ff5db","Type":"ContainerStarted","Data":"6aedf80ac3e047f48065b7cafbd5447a03f2ab35576c839d721ab7aa3f99bb12"} Dec 05 15:10:56 crc kubenswrapper[4840]: I1205 15:10:56.728049 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2htrv" podStartSLOduration=3.00175714 podStartE2EDuration="5.728003934s" podCreationTimestamp="2025-12-05 15:10:51 +0000 UTC" firstStartedPulling="2025-12-05 15:10:53.000066045 +0000 UTC m=+731.341128659" lastFinishedPulling="2025-12-05 15:10:55.726312839 +0000 UTC m=+734.067375453" observedRunningTime="2025-12-05 15:10:56.708108937 +0000 UTC m=+735.049171571" watchObservedRunningTime="2025-12-05 15:10:56.728003934 +0000 UTC m=+735.069066558" Dec 05 15:10:56 crc kubenswrapper[4840]: I1205 15:10:56.733247 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-nhx2t" podStartSLOduration=1.577533876 podStartE2EDuration="4.733233743s" podCreationTimestamp="2025-12-05 15:10:52 +0000 UTC" firstStartedPulling="2025-12-05 15:10:52.553827344 +0000 UTC m=+730.894889958" lastFinishedPulling="2025-12-05 15:10:55.709527161 +0000 UTC m=+734.050589825" observedRunningTime="2025-12-05 15:10:56.731978798 +0000 UTC m=+735.073041422" watchObservedRunningTime="2025-12-05 15:10:56.733233743 +0000 UTC m=+735.074296367" Dec 05 15:10:56 crc kubenswrapper[4840]: I1205 15:10:56.753355 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-nmstate/nmstate-handler-pcnxx" podStartSLOduration=2.322819185 podStartE2EDuration="5.753334826s" podCreationTimestamp="2025-12-05 15:10:51 +0000 UTC" firstStartedPulling="2025-12-05 15:10:52.299846004 +0000 UTC m=+730.640908728" lastFinishedPulling="2025-12-05 15:10:55.730361755 +0000 UTC m=+734.071424369" observedRunningTime="2025-12-05 15:10:56.747965513 +0000 UTC m=+735.089028137" watchObservedRunningTime="2025-12-05 15:10:56.753334826 +0000 UTC m=+735.094397450" Dec 05 15:10:58 crc kubenswrapper[4840]: I1205 15:10:58.694610 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-rldh8" event={"ID":"1c6815a9-6a06-4ed1-81fb-fe876b3ff5db","Type":"ContainerStarted","Data":"989eb9882345e0b63c7b27153bf564a5cc6425ac6950c8b5a973d0f70ca54217"} Dec 05 15:10:58 crc kubenswrapper[4840]: I1205 15:10:58.719543 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-rldh8" podStartSLOduration=2.316196416 podStartE2EDuration="7.719524365s" podCreationTimestamp="2025-12-05 15:10:51 +0000 UTC" firstStartedPulling="2025-12-05 15:10:52.421367408 +0000 UTC m=+730.762430022" lastFinishedPulling="2025-12-05 15:10:57.824695357 +0000 UTC m=+736.165757971" observedRunningTime="2025-12-05 15:10:58.719331699 +0000 UTC m=+737.060394373" watchObservedRunningTime="2025-12-05 15:10:58.719524365 +0000 UTC m=+737.060586979" Dec 05 15:11:02 crc kubenswrapper[4840]: I1205 15:11:02.300999 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-pcnxx" Dec 05 15:11:02 crc kubenswrapper[4840]: I1205 15:11:02.590429 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:11:02 crc kubenswrapper[4840]: I1205 15:11:02.590485 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:11:02 crc kubenswrapper[4840]: I1205 15:11:02.596884 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:11:02 crc kubenswrapper[4840]: I1205 15:11:02.739421 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-7dcd7df9db-98f2j" Dec 05 15:11:02 crc kubenswrapper[4840]: I1205 15:11:02.809937 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-zb8r7"] Dec 05 15:11:12 crc kubenswrapper[4840]: I1205 15:11:12.540998 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2htrv" Dec 05 15:11:17 crc kubenswrapper[4840]: I1205 15:11:17.694800 4840 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 05 15:11:19 crc kubenswrapper[4840]: I1205 15:11:19.472032 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:11:19 crc kubenswrapper[4840]: I1205 15:11:19.472398 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:11:26 crc kubenswrapper[4840]: I1205 15:11:26.116948 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt"] Dec 05 15:11:26 crc kubenswrapper[4840]: I1205 15:11:26.119024 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt" Dec 05 15:11:26 crc kubenswrapper[4840]: I1205 15:11:26.124407 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 15:11:26 crc kubenswrapper[4840]: I1205 15:11:26.127706 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt"] Dec 05 15:11:26 crc kubenswrapper[4840]: I1205 15:11:26.180636 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49z5x\" (UniqueName: \"kubernetes.io/projected/276a5275-a612-4fa5-9aac-c252dc7cad0a-kube-api-access-49z5x\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt\" (UID: \"276a5275-a612-4fa5-9aac-c252dc7cad0a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt" Dec 05 15:11:26 crc kubenswrapper[4840]: I1205 15:11:26.180693 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/276a5275-a612-4fa5-9aac-c252dc7cad0a-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt\" (UID: \"276a5275-a612-4fa5-9aac-c252dc7cad0a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt" Dec 05 15:11:26 crc kubenswrapper[4840]: I1205 15:11:26.180719 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/276a5275-a612-4fa5-9aac-c252dc7cad0a-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt\" (UID: \"276a5275-a612-4fa5-9aac-c252dc7cad0a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt" Dec 05 15:11:26 crc kubenswrapper[4840]: I1205 15:11:26.282464 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49z5x\" (UniqueName: \"kubernetes.io/projected/276a5275-a612-4fa5-9aac-c252dc7cad0a-kube-api-access-49z5x\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt\" (UID: \"276a5275-a612-4fa5-9aac-c252dc7cad0a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt" Dec 05 15:11:26 crc kubenswrapper[4840]: I1205 15:11:26.282516 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/276a5275-a612-4fa5-9aac-c252dc7cad0a-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt\" (UID: \"276a5275-a612-4fa5-9aac-c252dc7cad0a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt" Dec 05 15:11:26 crc kubenswrapper[4840]: I1205 15:11:26.282541 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/276a5275-a612-4fa5-9aac-c252dc7cad0a-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt\" (UID: \"276a5275-a612-4fa5-9aac-c252dc7cad0a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt" Dec 05 15:11:26 crc kubenswrapper[4840]: I1205 15:11:26.284063 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/276a5275-a612-4fa5-9aac-c252dc7cad0a-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt\" (UID: \"276a5275-a612-4fa5-9aac-c252dc7cad0a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt" Dec 05 15:11:26 crc kubenswrapper[4840]: I1205 15:11:26.284072 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/276a5275-a612-4fa5-9aac-c252dc7cad0a-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt\" (UID: \"276a5275-a612-4fa5-9aac-c252dc7cad0a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt" Dec 05 15:11:26 crc kubenswrapper[4840]: I1205 15:11:26.305388 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49z5x\" (UniqueName: \"kubernetes.io/projected/276a5275-a612-4fa5-9aac-c252dc7cad0a-kube-api-access-49z5x\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt\" (UID: \"276a5275-a612-4fa5-9aac-c252dc7cad0a\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt" Dec 05 15:11:26 crc kubenswrapper[4840]: I1205 15:11:26.451901 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt" Dec 05 15:11:26 crc kubenswrapper[4840]: I1205 15:11:26.944268 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt"] Dec 05 15:11:26 crc kubenswrapper[4840]: W1205 15:11:26.952618 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod276a5275_a612_4fa5_9aac_c252dc7cad0a.slice/crio-6db279fd9792dcd1d223bd0badc646590dfa57b0103920d50debaa5f431ebb2c WatchSource:0}: Error finding container 6db279fd9792dcd1d223bd0badc646590dfa57b0103920d50debaa5f431ebb2c: Status 404 returned error can't find the container with id 6db279fd9792dcd1d223bd0badc646590dfa57b0103920d50debaa5f431ebb2c Dec 05 15:11:27 crc kubenswrapper[4840]: I1205 15:11:27.895094 4840 generic.go:334] "Generic (PLEG): container finished" podID="276a5275-a612-4fa5-9aac-c252dc7cad0a" containerID="d05c7ff8f17aafae173ae05f75e98b3433ed2ba7f0cbd40b8d28bf1cb26b45fb" exitCode=0 Dec 05 15:11:27 crc kubenswrapper[4840]: I1205 15:11:27.895168 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt" event={"ID":"276a5275-a612-4fa5-9aac-c252dc7cad0a","Type":"ContainerDied","Data":"d05c7ff8f17aafae173ae05f75e98b3433ed2ba7f0cbd40b8d28bf1cb26b45fb"} Dec 05 15:11:27 crc kubenswrapper[4840]: I1205 15:11:27.895235 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt" 
event={"ID":"276a5275-a612-4fa5-9aac-c252dc7cad0a","Type":"ContainerStarted","Data":"6db279fd9792dcd1d223bd0badc646590dfa57b0103920d50debaa5f431ebb2c"} Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.007647 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-zb8r7" podUID="7c949308-6fa4-47cf-9275-b4ddcdcbb30a" containerName="console" containerID="cri-o://4666de25dc4adef932fa1e7b33810e1e79aa61433c87ea0bcd100221134d3e00" gracePeriod=15 Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.253818 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dttjh"] Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.256222 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dttjh" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.276236 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dttjh"] Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.321345 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dq8tf\" (UniqueName: \"kubernetes.io/projected/4db667aa-b264-446a-bf3c-2836efe06461-kube-api-access-dq8tf\") pod \"redhat-operators-dttjh\" (UID: \"4db667aa-b264-446a-bf3c-2836efe06461\") " pod="openshift-marketplace/redhat-operators-dttjh" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.321508 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4db667aa-b264-446a-bf3c-2836efe06461-utilities\") pod \"redhat-operators-dttjh\" (UID: \"4db667aa-b264-446a-bf3c-2836efe06461\") " pod="openshift-marketplace/redhat-operators-dttjh" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.321611 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4db667aa-b264-446a-bf3c-2836efe06461-catalog-content\") pod \"redhat-operators-dttjh\" (UID: \"4db667aa-b264-446a-bf3c-2836efe06461\") " pod="openshift-marketplace/redhat-operators-dttjh" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.393753 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-zb8r7_7c949308-6fa4-47cf-9275-b4ddcdcbb30a/console/0.log" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.393815 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-zb8r7" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.422243 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4db667aa-b264-446a-bf3c-2836efe06461-catalog-content\") pod \"redhat-operators-dttjh\" (UID: \"4db667aa-b264-446a-bf3c-2836efe06461\") " pod="openshift-marketplace/redhat-operators-dttjh" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.422301 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dq8tf\" (UniqueName: \"kubernetes.io/projected/4db667aa-b264-446a-bf3c-2836efe06461-kube-api-access-dq8tf\") pod \"redhat-operators-dttjh\" (UID: \"4db667aa-b264-446a-bf3c-2836efe06461\") " pod="openshift-marketplace/redhat-operators-dttjh" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.422357 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4db667aa-b264-446a-bf3c-2836efe06461-utilities\") pod \"redhat-operators-dttjh\" (UID: \"4db667aa-b264-446a-bf3c-2836efe06461\") " pod="openshift-marketplace/redhat-operators-dttjh" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.422780 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4db667aa-b264-446a-bf3c-2836efe06461-utilities\") pod \"redhat-operators-dttjh\" (UID: \"4db667aa-b264-446a-bf3c-2836efe06461\") " pod="openshift-marketplace/redhat-operators-dttjh" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.422949 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4db667aa-b264-446a-bf3c-2836efe06461-catalog-content\") pod \"redhat-operators-dttjh\" (UID: \"4db667aa-b264-446a-bf3c-2836efe06461\") " pod="openshift-marketplace/redhat-operators-dttjh" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.444260 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dq8tf\" (UniqueName: \"kubernetes.io/projected/4db667aa-b264-446a-bf3c-2836efe06461-kube-api-access-dq8tf\") pod \"redhat-operators-dttjh\" (UID: \"4db667aa-b264-446a-bf3c-2836efe06461\") " pod="openshift-marketplace/redhat-operators-dttjh" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.523442 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-console-config\") pod \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.523522 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-service-ca\") pod \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.523549 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngh6l\" (UniqueName: \"kubernetes.io/projected/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-kube-api-access-ngh6l\") pod \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.523575 4840 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-console-oauth-config\") pod \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.523601 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-oauth-serving-cert\") pod \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.523616 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-console-serving-cert\") pod \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.523634 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-trusted-ca-bundle\") pod \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\" (UID: \"7c949308-6fa4-47cf-9275-b4ddcdcbb30a\") " Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.524340 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "7c949308-6fa4-47cf-9275-b4ddcdcbb30a" (UID: "7c949308-6fa4-47cf-9275-b4ddcdcbb30a"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.524373 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-service-ca" (OuterVolumeSpecName: "service-ca") pod "7c949308-6fa4-47cf-9275-b4ddcdcbb30a" (UID: "7c949308-6fa4-47cf-9275-b4ddcdcbb30a"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.524379 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "7c949308-6fa4-47cf-9275-b4ddcdcbb30a" (UID: "7c949308-6fa4-47cf-9275-b4ddcdcbb30a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.524355 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-console-config" (OuterVolumeSpecName: "console-config") pod "7c949308-6fa4-47cf-9275-b4ddcdcbb30a" (UID: "7c949308-6fa4-47cf-9275-b4ddcdcbb30a"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.530569 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "7c949308-6fa4-47cf-9275-b4ddcdcbb30a" (UID: "7c949308-6fa4-47cf-9275-b4ddcdcbb30a"). 
InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.530888 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-kube-api-access-ngh6l" (OuterVolumeSpecName: "kube-api-access-ngh6l") pod "7c949308-6fa4-47cf-9275-b4ddcdcbb30a" (UID: "7c949308-6fa4-47cf-9275-b4ddcdcbb30a"). InnerVolumeSpecName "kube-api-access-ngh6l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.530939 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "7c949308-6fa4-47cf-9275-b4ddcdcbb30a" (UID: "7c949308-6fa4-47cf-9275-b4ddcdcbb30a"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.580973 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dttjh" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.624986 4840 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-console-config\") on node \"crc\" DevicePath \"\"" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.625028 4840 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.625041 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngh6l\" (UniqueName: \"kubernetes.io/projected/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-kube-api-access-ngh6l\") on node \"crc\" DevicePath \"\"" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.625052 4840 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.625064 4840 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.625077 4840 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.625088 4840 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7c949308-6fa4-47cf-9275-b4ddcdcbb30a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.772005 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dttjh"] Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.902041 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-zb8r7_7c949308-6fa4-47cf-9275-b4ddcdcbb30a/console/0.log" Dec 05 15:11:28 crc kubenswrapper[4840]: 
I1205 15:11:28.902098 4840 generic.go:334] "Generic (PLEG): container finished" podID="7c949308-6fa4-47cf-9275-b4ddcdcbb30a" containerID="4666de25dc4adef932fa1e7b33810e1e79aa61433c87ea0bcd100221134d3e00" exitCode=2 Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.902141 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-zb8r7" event={"ID":"7c949308-6fa4-47cf-9275-b4ddcdcbb30a","Type":"ContainerDied","Data":"4666de25dc4adef932fa1e7b33810e1e79aa61433c87ea0bcd100221134d3e00"} Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.902189 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-zb8r7" event={"ID":"7c949308-6fa4-47cf-9275-b4ddcdcbb30a","Type":"ContainerDied","Data":"44c8779241357345fdb01994e04250fd34d695cd5523d76c96c67a7d612fcf62"} Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.902194 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-zb8r7" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.902220 4840 scope.go:117] "RemoveContainer" containerID="4666de25dc4adef932fa1e7b33810e1e79aa61433c87ea0bcd100221134d3e00" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.903289 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dttjh" event={"ID":"4db667aa-b264-446a-bf3c-2836efe06461","Type":"ContainerStarted","Data":"1ed0d1f683c3f131dd42e9088dbf9d65aee5a3bfd1215e5022759a5574f26270"} Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.918921 4840 scope.go:117] "RemoveContainer" containerID="4666de25dc4adef932fa1e7b33810e1e79aa61433c87ea0bcd100221134d3e00" Dec 05 15:11:28 crc kubenswrapper[4840]: E1205 15:11:28.920255 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4666de25dc4adef932fa1e7b33810e1e79aa61433c87ea0bcd100221134d3e00\": container with ID starting with 4666de25dc4adef932fa1e7b33810e1e79aa61433c87ea0bcd100221134d3e00 not found: ID does not exist" containerID="4666de25dc4adef932fa1e7b33810e1e79aa61433c87ea0bcd100221134d3e00" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.920294 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4666de25dc4adef932fa1e7b33810e1e79aa61433c87ea0bcd100221134d3e00"} err="failed to get container status \"4666de25dc4adef932fa1e7b33810e1e79aa61433c87ea0bcd100221134d3e00\": rpc error: code = NotFound desc = could not find container \"4666de25dc4adef932fa1e7b33810e1e79aa61433c87ea0bcd100221134d3e00\": container with ID starting with 4666de25dc4adef932fa1e7b33810e1e79aa61433c87ea0bcd100221134d3e00 not found: ID does not exist" Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.932122 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-zb8r7"] Dec 05 15:11:28 crc kubenswrapper[4840]: I1205 15:11:28.938344 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-zb8r7"] Dec 05 15:11:29 crc kubenswrapper[4840]: I1205 15:11:29.914445 4840 generic.go:334] "Generic (PLEG): container finished" podID="276a5275-a612-4fa5-9aac-c252dc7cad0a" containerID="6c1a0cc55b23a7b34380afebd7629c86120ac1cb030796b69299b8336c236ec1" exitCode=0 Dec 05 15:11:29 crc kubenswrapper[4840]: I1205 15:11:29.914674 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt" event={"ID":"276a5275-a612-4fa5-9aac-c252dc7cad0a","Type":"ContainerDied","Data":"6c1a0cc55b23a7b34380afebd7629c86120ac1cb030796b69299b8336c236ec1"} Dec 05 15:11:29 crc kubenswrapper[4840]: I1205 15:11:29.917510 4840 generic.go:334] "Generic (PLEG): container finished" podID="4db667aa-b264-446a-bf3c-2836efe06461" containerID="f4eb81817c4a3cf7d497a9de806d3aa13f92cd36a7012ce461d636c81ffb643c" exitCode=0 Dec 05 15:11:29 crc kubenswrapper[4840]: I1205 15:11:29.917578 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dttjh" event={"ID":"4db667aa-b264-446a-bf3c-2836efe06461","Type":"ContainerDied","Data":"f4eb81817c4a3cf7d497a9de806d3aa13f92cd36a7012ce461d636c81ffb643c"} Dec 05 15:11:30 crc kubenswrapper[4840]: I1205 15:11:30.077582 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c949308-6fa4-47cf-9275-b4ddcdcbb30a" path="/var/lib/kubelet/pods/7c949308-6fa4-47cf-9275-b4ddcdcbb30a/volumes" Dec 05 15:11:30 crc kubenswrapper[4840]: I1205 15:11:30.928029 4840 generic.go:334] "Generic (PLEG): container finished" podID="276a5275-a612-4fa5-9aac-c252dc7cad0a" containerID="a6dc2ef5fcf52f6d5656f09f5a750cf8c560a9aadc7d8a756ab5925ee93e794a" exitCode=0 Dec 05 15:11:30 crc kubenswrapper[4840]: I1205 15:11:30.928146 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt" event={"ID":"276a5275-a612-4fa5-9aac-c252dc7cad0a","Type":"ContainerDied","Data":"a6dc2ef5fcf52f6d5656f09f5a750cf8c560a9aadc7d8a756ab5925ee93e794a"} Dec 05 15:11:30 crc kubenswrapper[4840]: I1205 15:11:30.930848 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dttjh" event={"ID":"4db667aa-b264-446a-bf3c-2836efe06461","Type":"ContainerStarted","Data":"994fc92a4c06f5f9a5c51e01dec0ef24ce250fb823b2de3e96ebbd740fca0c22"} Dec 05 15:11:32 crc kubenswrapper[4840]: I1205 15:11:32.436503 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt" Dec 05 15:11:32 crc kubenswrapper[4840]: I1205 15:11:32.627460 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-49z5x\" (UniqueName: \"kubernetes.io/projected/276a5275-a612-4fa5-9aac-c252dc7cad0a-kube-api-access-49z5x\") pod \"276a5275-a612-4fa5-9aac-c252dc7cad0a\" (UID: \"276a5275-a612-4fa5-9aac-c252dc7cad0a\") " Dec 05 15:11:32 crc kubenswrapper[4840]: I1205 15:11:32.627582 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/276a5275-a612-4fa5-9aac-c252dc7cad0a-util\") pod \"276a5275-a612-4fa5-9aac-c252dc7cad0a\" (UID: \"276a5275-a612-4fa5-9aac-c252dc7cad0a\") " Dec 05 15:11:32 crc kubenswrapper[4840]: I1205 15:11:32.627618 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/276a5275-a612-4fa5-9aac-c252dc7cad0a-bundle\") pod \"276a5275-a612-4fa5-9aac-c252dc7cad0a\" (UID: \"276a5275-a612-4fa5-9aac-c252dc7cad0a\") " Dec 05 15:11:32 crc kubenswrapper[4840]: I1205 15:11:32.628639 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/276a5275-a612-4fa5-9aac-c252dc7cad0a-bundle" (OuterVolumeSpecName: "bundle") pod "276a5275-a612-4fa5-9aac-c252dc7cad0a" (UID: "276a5275-a612-4fa5-9aac-c252dc7cad0a"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:11:32 crc kubenswrapper[4840]: I1205 15:11:32.640019 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/276a5275-a612-4fa5-9aac-c252dc7cad0a-kube-api-access-49z5x" (OuterVolumeSpecName: "kube-api-access-49z5x") pod "276a5275-a612-4fa5-9aac-c252dc7cad0a" (UID: "276a5275-a612-4fa5-9aac-c252dc7cad0a"). InnerVolumeSpecName "kube-api-access-49z5x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:11:32 crc kubenswrapper[4840]: I1205 15:11:32.730176 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/276a5275-a612-4fa5-9aac-c252dc7cad0a-util" (OuterVolumeSpecName: "util") pod "276a5275-a612-4fa5-9aac-c252dc7cad0a" (UID: "276a5275-a612-4fa5-9aac-c252dc7cad0a"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:11:32 crc kubenswrapper[4840]: I1205 15:11:32.788696 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-49z5x\" (UniqueName: \"kubernetes.io/projected/276a5275-a612-4fa5-9aac-c252dc7cad0a-kube-api-access-49z5x\") on node \"crc\" DevicePath \"\"" Dec 05 15:11:32 crc kubenswrapper[4840]: I1205 15:11:32.788969 4840 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/276a5275-a612-4fa5-9aac-c252dc7cad0a-util\") on node \"crc\" DevicePath \"\"" Dec 05 15:11:32 crc kubenswrapper[4840]: I1205 15:11:32.789051 4840 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/276a5275-a612-4fa5-9aac-c252dc7cad0a-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:11:32 crc kubenswrapper[4840]: I1205 15:11:32.946934 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt" event={"ID":"276a5275-a612-4fa5-9aac-c252dc7cad0a","Type":"ContainerDied","Data":"6db279fd9792dcd1d223bd0badc646590dfa57b0103920d50debaa5f431ebb2c"} Dec 05 15:11:32 crc kubenswrapper[4840]: I1205 15:11:32.946974 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6db279fd9792dcd1d223bd0badc646590dfa57b0103920d50debaa5f431ebb2c" Dec 05 15:11:32 crc kubenswrapper[4840]: I1205 15:11:32.947022 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt" Dec 05 15:11:33 crc kubenswrapper[4840]: I1205 15:11:33.954812 4840 generic.go:334] "Generic (PLEG): container finished" podID="4db667aa-b264-446a-bf3c-2836efe06461" containerID="994fc92a4c06f5f9a5c51e01dec0ef24ce250fb823b2de3e96ebbd740fca0c22" exitCode=0 Dec 05 15:11:33 crc kubenswrapper[4840]: I1205 15:11:33.954904 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dttjh" event={"ID":"4db667aa-b264-446a-bf3c-2836efe06461","Type":"ContainerDied","Data":"994fc92a4c06f5f9a5c51e01dec0ef24ce250fb823b2de3e96ebbd740fca0c22"} Dec 05 15:11:34 crc kubenswrapper[4840]: I1205 15:11:34.966591 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dttjh" event={"ID":"4db667aa-b264-446a-bf3c-2836efe06461","Type":"ContainerStarted","Data":"e2639cc97ecf4503d79c2d3d27410073d3b17e127a0fce1f654e7c4246011ece"} Dec 05 15:11:34 crc kubenswrapper[4840]: I1205 15:11:34.988066 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dttjh" podStartSLOduration=2.538782479 podStartE2EDuration="6.988044009s" podCreationTimestamp="2025-12-05 15:11:28 +0000 UTC" firstStartedPulling="2025-12-05 15:11:29.919179837 +0000 UTC m=+768.260242461" lastFinishedPulling="2025-12-05 15:11:34.368441357 +0000 UTC m=+772.709503991" observedRunningTime="2025-12-05 15:11:34.983912912 +0000 UTC m=+773.324975566" watchObservedRunningTime="2025-12-05 15:11:34.988044009 +0000 UTC m=+773.329106653" Dec 05 15:11:38 crc kubenswrapper[4840]: I1205 15:11:38.582039 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dttjh" Dec 05 15:11:38 crc kubenswrapper[4840]: I1205 15:11:38.582353 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dttjh" Dec 05 
15:11:39 crc kubenswrapper[4840]: I1205 15:11:39.686998 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dttjh" podUID="4db667aa-b264-446a-bf3c-2836efe06461" containerName="registry-server" probeResult="failure" output=< Dec 05 15:11:39 crc kubenswrapper[4840]: timeout: failed to connect service ":50051" within 1s Dec 05 15:11:39 crc kubenswrapper[4840]: > Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.295226 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-6c5f98fb9d-c7t9b"] Dec 05 15:11:43 crc kubenswrapper[4840]: E1205 15:11:43.295847 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="276a5275-a612-4fa5-9aac-c252dc7cad0a" containerName="util" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.295876 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="276a5275-a612-4fa5-9aac-c252dc7cad0a" containerName="util" Dec 05 15:11:43 crc kubenswrapper[4840]: E1205 15:11:43.295892 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="276a5275-a612-4fa5-9aac-c252dc7cad0a" containerName="extract" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.295899 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="276a5275-a612-4fa5-9aac-c252dc7cad0a" containerName="extract" Dec 05 15:11:43 crc kubenswrapper[4840]: E1205 15:11:43.295911 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="276a5275-a612-4fa5-9aac-c252dc7cad0a" containerName="pull" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.295919 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="276a5275-a612-4fa5-9aac-c252dc7cad0a" containerName="pull" Dec 05 15:11:43 crc kubenswrapper[4840]: E1205 15:11:43.295931 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c949308-6fa4-47cf-9275-b4ddcdcbb30a" containerName="console" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.295939 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c949308-6fa4-47cf-9275-b4ddcdcbb30a" containerName="console" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.296074 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="276a5275-a612-4fa5-9aac-c252dc7cad0a" containerName="extract" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.296089 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c949308-6fa4-47cf-9275-b4ddcdcbb30a" containerName="console" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.296534 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-6c5f98fb9d-c7t9b" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.298270 4840 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.298275 4840 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.298273 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.298698 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.299427 4840 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-w5l2b" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.317075 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-6c5f98fb9d-c7t9b"] Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.409249 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5debe082-c97c-4bb6-8eb6-475c0b97e485-apiservice-cert\") pod \"metallb-operator-controller-manager-6c5f98fb9d-c7t9b\" (UID: \"5debe082-c97c-4bb6-8eb6-475c0b97e485\") " pod="metallb-system/metallb-operator-controller-manager-6c5f98fb9d-c7t9b" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.409341 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2nxh\" (UniqueName: \"kubernetes.io/projected/5debe082-c97c-4bb6-8eb6-475c0b97e485-kube-api-access-c2nxh\") pod \"metallb-operator-controller-manager-6c5f98fb9d-c7t9b\" (UID: \"5debe082-c97c-4bb6-8eb6-475c0b97e485\") " pod="metallb-system/metallb-operator-controller-manager-6c5f98fb9d-c7t9b" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.409530 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5debe082-c97c-4bb6-8eb6-475c0b97e485-webhook-cert\") pod \"metallb-operator-controller-manager-6c5f98fb9d-c7t9b\" (UID: \"5debe082-c97c-4bb6-8eb6-475c0b97e485\") " pod="metallb-system/metallb-operator-controller-manager-6c5f98fb9d-c7t9b" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.511103 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5debe082-c97c-4bb6-8eb6-475c0b97e485-apiservice-cert\") pod \"metallb-operator-controller-manager-6c5f98fb9d-c7t9b\" (UID: \"5debe082-c97c-4bb6-8eb6-475c0b97e485\") " pod="metallb-system/metallb-operator-controller-manager-6c5f98fb9d-c7t9b" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.511171 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2nxh\" (UniqueName: \"kubernetes.io/projected/5debe082-c97c-4bb6-8eb6-475c0b97e485-kube-api-access-c2nxh\") pod \"metallb-operator-controller-manager-6c5f98fb9d-c7t9b\" (UID: \"5debe082-c97c-4bb6-8eb6-475c0b97e485\") " pod="metallb-system/metallb-operator-controller-manager-6c5f98fb9d-c7t9b" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.511193 
4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5debe082-c97c-4bb6-8eb6-475c0b97e485-webhook-cert\") pod \"metallb-operator-controller-manager-6c5f98fb9d-c7t9b\" (UID: \"5debe082-c97c-4bb6-8eb6-475c0b97e485\") " pod="metallb-system/metallb-operator-controller-manager-6c5f98fb9d-c7t9b" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.516078 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5debe082-c97c-4bb6-8eb6-475c0b97e485-webhook-cert\") pod \"metallb-operator-controller-manager-6c5f98fb9d-c7t9b\" (UID: \"5debe082-c97c-4bb6-8eb6-475c0b97e485\") " pod="metallb-system/metallb-operator-controller-manager-6c5f98fb9d-c7t9b" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.518671 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5debe082-c97c-4bb6-8eb6-475c0b97e485-apiservice-cert\") pod \"metallb-operator-controller-manager-6c5f98fb9d-c7t9b\" (UID: \"5debe082-c97c-4bb6-8eb6-475c0b97e485\") " pod="metallb-system/metallb-operator-controller-manager-6c5f98fb9d-c7t9b" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.541088 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2nxh\" (UniqueName: \"kubernetes.io/projected/5debe082-c97c-4bb6-8eb6-475c0b97e485-kube-api-access-c2nxh\") pod \"metallb-operator-controller-manager-6c5f98fb9d-c7t9b\" (UID: \"5debe082-c97c-4bb6-8eb6-475c0b97e485\") " pod="metallb-system/metallb-operator-controller-manager-6c5f98fb9d-c7t9b" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.687306 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-6c5f98fb9d-c7t9b" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.852074 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-f894b867-vrfmt"] Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.852975 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-f894b867-vrfmt" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.855172 4840 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.855364 4840 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.855531 4840 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-gxgqg" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.868377 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-f894b867-vrfmt"] Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.921493 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b51981bb-b0ed-4c6a-b82d-ae3155eefcb5-apiservice-cert\") pod \"metallb-operator-webhook-server-f894b867-vrfmt\" (UID: \"b51981bb-b0ed-4c6a-b82d-ae3155eefcb5\") " pod="metallb-system/metallb-operator-webhook-server-f894b867-vrfmt" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.921539 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqb2c\" (UniqueName: \"kubernetes.io/projected/b51981bb-b0ed-4c6a-b82d-ae3155eefcb5-kube-api-access-bqb2c\") pod \"metallb-operator-webhook-server-f894b867-vrfmt\" (UID: \"b51981bb-b0ed-4c6a-b82d-ae3155eefcb5\") " pod="metallb-system/metallb-operator-webhook-server-f894b867-vrfmt" Dec 05 15:11:43 crc kubenswrapper[4840]: I1205 15:11:43.921604 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b51981bb-b0ed-4c6a-b82d-ae3155eefcb5-webhook-cert\") pod \"metallb-operator-webhook-server-f894b867-vrfmt\" (UID: \"b51981bb-b0ed-4c6a-b82d-ae3155eefcb5\") " pod="metallb-system/metallb-operator-webhook-server-f894b867-vrfmt" Dec 05 15:11:44 crc kubenswrapper[4840]: I1205 15:11:44.022454 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqb2c\" (UniqueName: \"kubernetes.io/projected/b51981bb-b0ed-4c6a-b82d-ae3155eefcb5-kube-api-access-bqb2c\") pod \"metallb-operator-webhook-server-f894b867-vrfmt\" (UID: \"b51981bb-b0ed-4c6a-b82d-ae3155eefcb5\") " pod="metallb-system/metallb-operator-webhook-server-f894b867-vrfmt" Dec 05 15:11:44 crc kubenswrapper[4840]: I1205 15:11:44.022502 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b51981bb-b0ed-4c6a-b82d-ae3155eefcb5-apiservice-cert\") pod \"metallb-operator-webhook-server-f894b867-vrfmt\" (UID: \"b51981bb-b0ed-4c6a-b82d-ae3155eefcb5\") " pod="metallb-system/metallb-operator-webhook-server-f894b867-vrfmt" Dec 05 15:11:44 crc kubenswrapper[4840]: I1205 15:11:44.022550 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b51981bb-b0ed-4c6a-b82d-ae3155eefcb5-webhook-cert\") pod \"metallb-operator-webhook-server-f894b867-vrfmt\" (UID: \"b51981bb-b0ed-4c6a-b82d-ae3155eefcb5\") " pod="metallb-system/metallb-operator-webhook-server-f894b867-vrfmt" Dec 05 15:11:44 crc kubenswrapper[4840]: I1205 15:11:44.032486 4840 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b51981bb-b0ed-4c6a-b82d-ae3155eefcb5-webhook-cert\") pod \"metallb-operator-webhook-server-f894b867-vrfmt\" (UID: \"b51981bb-b0ed-4c6a-b82d-ae3155eefcb5\") " pod="metallb-system/metallb-operator-webhook-server-f894b867-vrfmt" Dec 05 15:11:44 crc kubenswrapper[4840]: I1205 15:11:44.033699 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b51981bb-b0ed-4c6a-b82d-ae3155eefcb5-apiservice-cert\") pod \"metallb-operator-webhook-server-f894b867-vrfmt\" (UID: \"b51981bb-b0ed-4c6a-b82d-ae3155eefcb5\") " pod="metallb-system/metallb-operator-webhook-server-f894b867-vrfmt" Dec 05 15:11:44 crc kubenswrapper[4840]: I1205 15:11:44.042514 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqb2c\" (UniqueName: \"kubernetes.io/projected/b51981bb-b0ed-4c6a-b82d-ae3155eefcb5-kube-api-access-bqb2c\") pod \"metallb-operator-webhook-server-f894b867-vrfmt\" (UID: \"b51981bb-b0ed-4c6a-b82d-ae3155eefcb5\") " pod="metallb-system/metallb-operator-webhook-server-f894b867-vrfmt" Dec 05 15:11:44 crc kubenswrapper[4840]: I1205 15:11:44.175177 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-f894b867-vrfmt" Dec 05 15:11:44 crc kubenswrapper[4840]: I1205 15:11:44.273039 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-6c5f98fb9d-c7t9b"] Dec 05 15:11:44 crc kubenswrapper[4840]: W1205 15:11:44.285885 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5debe082_c97c_4bb6_8eb6_475c0b97e485.slice/crio-40b59ae765316fe790922f0cdc0ffddd0f5c11158422e2a2f200c1bf4232d992 WatchSource:0}: Error finding container 40b59ae765316fe790922f0cdc0ffddd0f5c11158422e2a2f200c1bf4232d992: Status 404 returned error can't find the container with id 40b59ae765316fe790922f0cdc0ffddd0f5c11158422e2a2f200c1bf4232d992 Dec 05 15:11:44 crc kubenswrapper[4840]: I1205 15:11:44.674381 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-f894b867-vrfmt"] Dec 05 15:11:45 crc kubenswrapper[4840]: I1205 15:11:45.157669 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-6c5f98fb9d-c7t9b" event={"ID":"5debe082-c97c-4bb6-8eb6-475c0b97e485","Type":"ContainerStarted","Data":"40b59ae765316fe790922f0cdc0ffddd0f5c11158422e2a2f200c1bf4232d992"} Dec 05 15:11:45 crc kubenswrapper[4840]: I1205 15:11:45.159473 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-f894b867-vrfmt" event={"ID":"b51981bb-b0ed-4c6a-b82d-ae3155eefcb5","Type":"ContainerStarted","Data":"c3ed798b57de1d2226b7bcaecff58fe02443eb7d67fde841a80e5945822f1c16"} Dec 05 15:11:48 crc kubenswrapper[4840]: I1205 15:11:48.676515 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dttjh" Dec 05 15:11:48 crc kubenswrapper[4840]: I1205 15:11:48.727094 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dttjh" Dec 05 15:11:48 crc kubenswrapper[4840]: I1205 15:11:48.909842 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/redhat-operators-dttjh"] Dec 05 15:11:49 crc kubenswrapper[4840]: I1205 15:11:49.472417 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:11:49 crc kubenswrapper[4840]: I1205 15:11:49.472498 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:11:49 crc kubenswrapper[4840]: I1205 15:11:49.472547 4840 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" Dec 05 15:11:49 crc kubenswrapper[4840]: I1205 15:11:49.473187 4840 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"83830503ae8c68349e9a52130f5a53e1e9c359c7b97c632d824c7d4a08e0e1f1"} pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 15:11:49 crc kubenswrapper[4840]: I1205 15:11:49.473449 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" containerID="cri-o://83830503ae8c68349e9a52130f5a53e1e9c359c7b97c632d824c7d4a08e0e1f1" gracePeriod=600 Dec 05 15:11:50 crc kubenswrapper[4840]: I1205 15:11:50.189138 4840 generic.go:334] "Generic (PLEG): container finished" podID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerID="83830503ae8c68349e9a52130f5a53e1e9c359c7b97c632d824c7d4a08e0e1f1" exitCode=0 Dec 05 15:11:50 crc kubenswrapper[4840]: I1205 15:11:50.189219 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerDied","Data":"83830503ae8c68349e9a52130f5a53e1e9c359c7b97c632d824c7d4a08e0e1f1"} Dec 05 15:11:50 crc kubenswrapper[4840]: I1205 15:11:50.189278 4840 scope.go:117] "RemoveContainer" containerID="5fb60fdb5ee2de7da83573520684a8ffa3aaf560d6a723c6162e48846e14816d" Dec 05 15:11:50 crc kubenswrapper[4840]: I1205 15:11:50.189345 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-dttjh" podUID="4db667aa-b264-446a-bf3c-2836efe06461" containerName="registry-server" containerID="cri-o://e2639cc97ecf4503d79c2d3d27410073d3b17e127a0fce1f654e7c4246011ece" gracePeriod=2 Dec 05 15:11:51 crc kubenswrapper[4840]: I1205 15:11:51.207002 4840 generic.go:334] "Generic (PLEG): container finished" podID="4db667aa-b264-446a-bf3c-2836efe06461" containerID="e2639cc97ecf4503d79c2d3d27410073d3b17e127a0fce1f654e7c4246011ece" exitCode=0 Dec 05 15:11:51 crc kubenswrapper[4840]: I1205 15:11:51.207037 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dttjh" event={"ID":"4db667aa-b264-446a-bf3c-2836efe06461","Type":"ContainerDied","Data":"e2639cc97ecf4503d79c2d3d27410073d3b17e127a0fce1f654e7c4246011ece"} Dec 05 
15:11:51 crc kubenswrapper[4840]: I1205 15:11:51.229887 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dttjh" Dec 05 15:11:51 crc kubenswrapper[4840]: I1205 15:11:51.357038 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dq8tf\" (UniqueName: \"kubernetes.io/projected/4db667aa-b264-446a-bf3c-2836efe06461-kube-api-access-dq8tf\") pod \"4db667aa-b264-446a-bf3c-2836efe06461\" (UID: \"4db667aa-b264-446a-bf3c-2836efe06461\") " Dec 05 15:11:51 crc kubenswrapper[4840]: I1205 15:11:51.357120 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4db667aa-b264-446a-bf3c-2836efe06461-utilities\") pod \"4db667aa-b264-446a-bf3c-2836efe06461\" (UID: \"4db667aa-b264-446a-bf3c-2836efe06461\") " Dec 05 15:11:51 crc kubenswrapper[4840]: I1205 15:11:51.357246 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4db667aa-b264-446a-bf3c-2836efe06461-catalog-content\") pod \"4db667aa-b264-446a-bf3c-2836efe06461\" (UID: \"4db667aa-b264-446a-bf3c-2836efe06461\") " Dec 05 15:11:51 crc kubenswrapper[4840]: I1205 15:11:51.358468 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4db667aa-b264-446a-bf3c-2836efe06461-utilities" (OuterVolumeSpecName: "utilities") pod "4db667aa-b264-446a-bf3c-2836efe06461" (UID: "4db667aa-b264-446a-bf3c-2836efe06461"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:11:51 crc kubenswrapper[4840]: I1205 15:11:51.362458 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4db667aa-b264-446a-bf3c-2836efe06461-kube-api-access-dq8tf" (OuterVolumeSpecName: "kube-api-access-dq8tf") pod "4db667aa-b264-446a-bf3c-2836efe06461" (UID: "4db667aa-b264-446a-bf3c-2836efe06461"). InnerVolumeSpecName "kube-api-access-dq8tf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:11:51 crc kubenswrapper[4840]: I1205 15:11:51.458181 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dq8tf\" (UniqueName: \"kubernetes.io/projected/4db667aa-b264-446a-bf3c-2836efe06461-kube-api-access-dq8tf\") on node \"crc\" DevicePath \"\"" Dec 05 15:11:51 crc kubenswrapper[4840]: I1205 15:11:51.458214 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4db667aa-b264-446a-bf3c-2836efe06461-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 15:11:51 crc kubenswrapper[4840]: I1205 15:11:51.458850 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4db667aa-b264-446a-bf3c-2836efe06461-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4db667aa-b264-446a-bf3c-2836efe06461" (UID: "4db667aa-b264-446a-bf3c-2836efe06461"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:11:51 crc kubenswrapper[4840]: I1205 15:11:51.559136 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4db667aa-b264-446a-bf3c-2836efe06461-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 15:11:52 crc kubenswrapper[4840]: I1205 15:11:52.217889 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-6c5f98fb9d-c7t9b" event={"ID":"5debe082-c97c-4bb6-8eb6-475c0b97e485","Type":"ContainerStarted","Data":"7da0273e60a82a3088c4bf7f9c208b4d76148084f6978d200675e789bba25a46"} Dec 05 15:11:52 crc kubenswrapper[4840]: I1205 15:11:52.218272 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-6c5f98fb9d-c7t9b" Dec 05 15:11:52 crc kubenswrapper[4840]: I1205 15:11:52.219952 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerStarted","Data":"d69a7eb1c40b94e45105da8261bb07a2f04367f59caef02ba4c0e3aa6dc28e33"} Dec 05 15:11:52 crc kubenswrapper[4840]: I1205 15:11:52.222369 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-f894b867-vrfmt" event={"ID":"b51981bb-b0ed-4c6a-b82d-ae3155eefcb5","Type":"ContainerStarted","Data":"3548e239840fe3b879b6cf624773c573d8afa0f286aac22f9632e52d0738f060"} Dec 05 15:11:52 crc kubenswrapper[4840]: I1205 15:11:52.222435 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-f894b867-vrfmt" Dec 05 15:11:52 crc kubenswrapper[4840]: I1205 15:11:52.224481 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dttjh" event={"ID":"4db667aa-b264-446a-bf3c-2836efe06461","Type":"ContainerDied","Data":"1ed0d1f683c3f131dd42e9088dbf9d65aee5a3bfd1215e5022759a5574f26270"} Dec 05 15:11:52 crc kubenswrapper[4840]: I1205 15:11:52.224518 4840 scope.go:117] "RemoveContainer" containerID="e2639cc97ecf4503d79c2d3d27410073d3b17e127a0fce1f654e7c4246011ece" Dec 05 15:11:52 crc kubenswrapper[4840]: I1205 15:11:52.224541 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dttjh" Dec 05 15:11:52 crc kubenswrapper[4840]: I1205 15:11:52.241441 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-6c5f98fb9d-c7t9b" podStartSLOduration=2.531342367 podStartE2EDuration="9.241414774s" podCreationTimestamp="2025-12-05 15:11:43 +0000 UTC" firstStartedPulling="2025-12-05 15:11:44.294165301 +0000 UTC m=+782.635227915" lastFinishedPulling="2025-12-05 15:11:51.004237698 +0000 UTC m=+789.345300322" observedRunningTime="2025-12-05 15:11:52.23633941 +0000 UTC m=+790.577402044" watchObservedRunningTime="2025-12-05 15:11:52.241414774 +0000 UTC m=+790.582477408" Dec 05 15:11:52 crc kubenswrapper[4840]: I1205 15:11:52.247534 4840 scope.go:117] "RemoveContainer" containerID="994fc92a4c06f5f9a5c51e01dec0ef24ce250fb823b2de3e96ebbd740fca0c22" Dec 05 15:11:52 crc kubenswrapper[4840]: I1205 15:11:52.266843 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dttjh"] Dec 05 15:11:52 crc kubenswrapper[4840]: I1205 15:11:52.275535 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-dttjh"] Dec 05 15:11:52 crc kubenswrapper[4840]: I1205 15:11:52.285085 4840 scope.go:117] "RemoveContainer" containerID="f4eb81817c4a3cf7d497a9de806d3aa13f92cd36a7012ce461d636c81ffb643c" Dec 05 15:11:52 crc kubenswrapper[4840]: I1205 15:11:52.307408 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-f894b867-vrfmt" podStartSLOduration=2.960977093 podStartE2EDuration="9.307387415s" podCreationTimestamp="2025-12-05 15:11:43 +0000 UTC" firstStartedPulling="2025-12-05 15:11:44.687705338 +0000 UTC m=+783.028767942" lastFinishedPulling="2025-12-05 15:11:51.03411565 +0000 UTC m=+789.375178264" observedRunningTime="2025-12-05 15:11:52.28580862 +0000 UTC m=+790.626871254" watchObservedRunningTime="2025-12-05 15:11:52.307387415 +0000 UTC m=+790.648450029" Dec 05 15:11:54 crc kubenswrapper[4840]: I1205 15:11:54.075589 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4db667aa-b264-446a-bf3c-2836efe06461" path="/var/lib/kubelet/pods/4db667aa-b264-446a-bf3c-2836efe06461/volumes" Dec 05 15:12:04 crc kubenswrapper[4840]: I1205 15:12:04.180478 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-f894b867-vrfmt" Dec 05 15:12:23 crc kubenswrapper[4840]: I1205 15:12:23.692289 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-6c5f98fb9d-c7t9b" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.431031 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-xhqsd"] Dec 05 15:12:24 crc kubenswrapper[4840]: E1205 15:12:24.431304 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4db667aa-b264-446a-bf3c-2836efe06461" containerName="extract-content" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.431323 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="4db667aa-b264-446a-bf3c-2836efe06461" containerName="extract-content" Dec 05 15:12:24 crc kubenswrapper[4840]: E1205 15:12:24.431346 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4db667aa-b264-446a-bf3c-2836efe06461" containerName="extract-utilities" Dec 05 15:12:24 crc kubenswrapper[4840]: 
I1205 15:12:24.431355 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="4db667aa-b264-446a-bf3c-2836efe06461" containerName="extract-utilities" Dec 05 15:12:24 crc kubenswrapper[4840]: E1205 15:12:24.431379 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4db667aa-b264-446a-bf3c-2836efe06461" containerName="registry-server" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.431388 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="4db667aa-b264-446a-bf3c-2836efe06461" containerName="registry-server" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.431517 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="4db667aa-b264-446a-bf3c-2836efe06461" containerName="registry-server" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.431992 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-xhqsd" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.434365 4840 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.434553 4840 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-vc9zn" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.439043 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-6vbng"] Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.441657 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.446085 4840 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.446541 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.455207 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-xhqsd"] Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.519536 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-8v7db"] Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.520373 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-8v7db" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.522709 4840 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-4d765" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.523074 4840 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.523450 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.525432 4840 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.534087 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-bbxvm"] Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.535186 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-bbxvm" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.537710 4840 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.551952 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-bbxvm"] Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.584388 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/4c47f541-43bf-400b-bb9f-e3239ad0c636-frr-conf\") pod \"frr-k8s-6vbng\" (UID: \"4c47f541-43bf-400b-bb9f-e3239ad0c636\") " pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.584426 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/475d466e-01af-4a8b-85c9-585a152bc376-metallb-excludel2\") pod \"speaker-8v7db\" (UID: \"475d466e-01af-4a8b-85c9-585a152bc376\") " pod="metallb-system/speaker-8v7db" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.584454 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/4c47f541-43bf-400b-bb9f-e3239ad0c636-frr-sockets\") pod \"frr-k8s-6vbng\" (UID: \"4c47f541-43bf-400b-bb9f-e3239ad0c636\") " pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.584471 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/4c47f541-43bf-400b-bb9f-e3239ad0c636-reloader\") pod \"frr-k8s-6vbng\" (UID: \"4c47f541-43bf-400b-bb9f-e3239ad0c636\") " pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.584489 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2gkl\" (UniqueName: \"kubernetes.io/projected/475d466e-01af-4a8b-85c9-585a152bc376-kube-api-access-l2gkl\") pod \"speaker-8v7db\" (UID: \"475d466e-01af-4a8b-85c9-585a152bc376\") " pod="metallb-system/speaker-8v7db" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.584531 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8p2cz\" (UniqueName: \"kubernetes.io/projected/146837b6-fb7d-4368-9ae3-bc4106ff72de-kube-api-access-8p2cz\") pod \"frr-k8s-webhook-server-7fcb986d4-xhqsd\" (UID: \"146837b6-fb7d-4368-9ae3-bc4106ff72de\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-xhqsd" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.584554 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/146837b6-fb7d-4368-9ae3-bc4106ff72de-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-xhqsd\" (UID: \"146837b6-fb7d-4368-9ae3-bc4106ff72de\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-xhqsd" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.584568 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4c47f541-43bf-400b-bb9f-e3239ad0c636-metrics-certs\") pod \"frr-k8s-6vbng\" (UID: \"4c47f541-43bf-400b-bb9f-e3239ad0c636\") " pod="metallb-system/frr-k8s-6vbng" 
Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.584602 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/4c47f541-43bf-400b-bb9f-e3239ad0c636-frr-startup\") pod \"frr-k8s-6vbng\" (UID: \"4c47f541-43bf-400b-bb9f-e3239ad0c636\") " pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.584623 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/4c47f541-43bf-400b-bb9f-e3239ad0c636-metrics\") pod \"frr-k8s-6vbng\" (UID: \"4c47f541-43bf-400b-bb9f-e3239ad0c636\") " pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.584640 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7cv5\" (UniqueName: \"kubernetes.io/projected/4c47f541-43bf-400b-bb9f-e3239ad0c636-kube-api-access-k7cv5\") pod \"frr-k8s-6vbng\" (UID: \"4c47f541-43bf-400b-bb9f-e3239ad0c636\") " pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.584656 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/475d466e-01af-4a8b-85c9-585a152bc376-metrics-certs\") pod \"speaker-8v7db\" (UID: \"475d466e-01af-4a8b-85c9-585a152bc376\") " pod="metallb-system/speaker-8v7db" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.584681 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/475d466e-01af-4a8b-85c9-585a152bc376-memberlist\") pod \"speaker-8v7db\" (UID: \"475d466e-01af-4a8b-85c9-585a152bc376\") " pod="metallb-system/speaker-8v7db" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.686185 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/4c47f541-43bf-400b-bb9f-e3239ad0c636-frr-sockets\") pod \"frr-k8s-6vbng\" (UID: \"4c47f541-43bf-400b-bb9f-e3239ad0c636\") " pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.686250 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/4c47f541-43bf-400b-bb9f-e3239ad0c636-reloader\") pod \"frr-k8s-6vbng\" (UID: \"4c47f541-43bf-400b-bb9f-e3239ad0c636\") " pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.686294 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2gkl\" (UniqueName: \"kubernetes.io/projected/475d466e-01af-4a8b-85c9-585a152bc376-kube-api-access-l2gkl\") pod \"speaker-8v7db\" (UID: \"475d466e-01af-4a8b-85c9-585a152bc376\") " pod="metallb-system/speaker-8v7db" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.686332 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8p2cz\" (UniqueName: \"kubernetes.io/projected/146837b6-fb7d-4368-9ae3-bc4106ff72de-kube-api-access-8p2cz\") pod \"frr-k8s-webhook-server-7fcb986d4-xhqsd\" (UID: \"146837b6-fb7d-4368-9ae3-bc4106ff72de\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-xhqsd" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.686363 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-dffc4\" (UniqueName: \"kubernetes.io/projected/233d6836-c070-4d58-8f3d-6145a065240d-kube-api-access-dffc4\") pod \"controller-f8648f98b-bbxvm\" (UID: \"233d6836-c070-4d58-8f3d-6145a065240d\") " pod="metallb-system/controller-f8648f98b-bbxvm" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.686392 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/146837b6-fb7d-4368-9ae3-bc4106ff72de-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-xhqsd\" (UID: \"146837b6-fb7d-4368-9ae3-bc4106ff72de\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-xhqsd" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.686416 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4c47f541-43bf-400b-bb9f-e3239ad0c636-metrics-certs\") pod \"frr-k8s-6vbng\" (UID: \"4c47f541-43bf-400b-bb9f-e3239ad0c636\") " pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.686454 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/4c47f541-43bf-400b-bb9f-e3239ad0c636-frr-startup\") pod \"frr-k8s-6vbng\" (UID: \"4c47f541-43bf-400b-bb9f-e3239ad0c636\") " pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.686481 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/233d6836-c070-4d58-8f3d-6145a065240d-cert\") pod \"controller-f8648f98b-bbxvm\" (UID: \"233d6836-c070-4d58-8f3d-6145a065240d\") " pod="metallb-system/controller-f8648f98b-bbxvm" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.686518 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/4c47f541-43bf-400b-bb9f-e3239ad0c636-metrics\") pod \"frr-k8s-6vbng\" (UID: \"4c47f541-43bf-400b-bb9f-e3239ad0c636\") " pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.686543 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7cv5\" (UniqueName: \"kubernetes.io/projected/4c47f541-43bf-400b-bb9f-e3239ad0c636-kube-api-access-k7cv5\") pod \"frr-k8s-6vbng\" (UID: \"4c47f541-43bf-400b-bb9f-e3239ad0c636\") " pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.686566 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/475d466e-01af-4a8b-85c9-585a152bc376-metrics-certs\") pod \"speaker-8v7db\" (UID: \"475d466e-01af-4a8b-85c9-585a152bc376\") " pod="metallb-system/speaker-8v7db" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.686644 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/233d6836-c070-4d58-8f3d-6145a065240d-metrics-certs\") pod \"controller-f8648f98b-bbxvm\" (UID: \"233d6836-c070-4d58-8f3d-6145a065240d\") " pod="metallb-system/controller-f8648f98b-bbxvm" Dec 05 15:12:24 crc kubenswrapper[4840]: E1205 15:12:24.686676 4840 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.686724 4840 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/475d466e-01af-4a8b-85c9-585a152bc376-memberlist\") pod \"speaker-8v7db\" (UID: \"475d466e-01af-4a8b-85c9-585a152bc376\") " pod="metallb-system/speaker-8v7db" Dec 05 15:12:24 crc kubenswrapper[4840]: E1205 15:12:24.686738 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/475d466e-01af-4a8b-85c9-585a152bc376-metrics-certs podName:475d466e-01af-4a8b-85c9-585a152bc376 nodeName:}" failed. No retries permitted until 2025-12-05 15:12:25.186717794 +0000 UTC m=+823.527780418 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/475d466e-01af-4a8b-85c9-585a152bc376-metrics-certs") pod "speaker-8v7db" (UID: "475d466e-01af-4a8b-85c9-585a152bc376") : secret "speaker-certs-secret" not found Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.686759 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/475d466e-01af-4a8b-85c9-585a152bc376-metallb-excludel2\") pod \"speaker-8v7db\" (UID: \"475d466e-01af-4a8b-85c9-585a152bc376\") " pod="metallb-system/speaker-8v7db" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.686789 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/4c47f541-43bf-400b-bb9f-e3239ad0c636-frr-conf\") pod \"frr-k8s-6vbng\" (UID: \"4c47f541-43bf-400b-bb9f-e3239ad0c636\") " pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.687126 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/4c47f541-43bf-400b-bb9f-e3239ad0c636-reloader\") pod \"frr-k8s-6vbng\" (UID: \"4c47f541-43bf-400b-bb9f-e3239ad0c636\") " pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.687157 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/4c47f541-43bf-400b-bb9f-e3239ad0c636-frr-sockets\") pod \"frr-k8s-6vbng\" (UID: \"4c47f541-43bf-400b-bb9f-e3239ad0c636\") " pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.687247 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/4c47f541-43bf-400b-bb9f-e3239ad0c636-frr-conf\") pod \"frr-k8s-6vbng\" (UID: \"4c47f541-43bf-400b-bb9f-e3239ad0c636\") " pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:24 crc kubenswrapper[4840]: E1205 15:12:24.687348 4840 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.687384 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/4c47f541-43bf-400b-bb9f-e3239ad0c636-metrics\") pod \"frr-k8s-6vbng\" (UID: \"4c47f541-43bf-400b-bb9f-e3239ad0c636\") " pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:24 crc kubenswrapper[4840]: E1205 15:12:24.687437 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/475d466e-01af-4a8b-85c9-585a152bc376-memberlist podName:475d466e-01af-4a8b-85c9-585a152bc376 nodeName:}" failed. No retries permitted until 2025-12-05 15:12:25.187416943 +0000 UTC m=+823.528479567 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/475d466e-01af-4a8b-85c9-585a152bc376-memberlist") pod "speaker-8v7db" (UID: "475d466e-01af-4a8b-85c9-585a152bc376") : secret "metallb-memberlist" not found Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.687822 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/475d466e-01af-4a8b-85c9-585a152bc376-metallb-excludel2\") pod \"speaker-8v7db\" (UID: \"475d466e-01af-4a8b-85c9-585a152bc376\") " pod="metallb-system/speaker-8v7db" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.688799 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/4c47f541-43bf-400b-bb9f-e3239ad0c636-frr-startup\") pod \"frr-k8s-6vbng\" (UID: \"4c47f541-43bf-400b-bb9f-e3239ad0c636\") " pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.694585 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4c47f541-43bf-400b-bb9f-e3239ad0c636-metrics-certs\") pod \"frr-k8s-6vbng\" (UID: \"4c47f541-43bf-400b-bb9f-e3239ad0c636\") " pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.694711 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/146837b6-fb7d-4368-9ae3-bc4106ff72de-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-xhqsd\" (UID: \"146837b6-fb7d-4368-9ae3-bc4106ff72de\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-xhqsd" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.710420 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2gkl\" (UniqueName: \"kubernetes.io/projected/475d466e-01af-4a8b-85c9-585a152bc376-kube-api-access-l2gkl\") pod \"speaker-8v7db\" (UID: \"475d466e-01af-4a8b-85c9-585a152bc376\") " pod="metallb-system/speaker-8v7db" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.714041 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7cv5\" (UniqueName: \"kubernetes.io/projected/4c47f541-43bf-400b-bb9f-e3239ad0c636-kube-api-access-k7cv5\") pod \"frr-k8s-6vbng\" (UID: \"4c47f541-43bf-400b-bb9f-e3239ad0c636\") " pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.715216 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8p2cz\" (UniqueName: \"kubernetes.io/projected/146837b6-fb7d-4368-9ae3-bc4106ff72de-kube-api-access-8p2cz\") pod \"frr-k8s-webhook-server-7fcb986d4-xhqsd\" (UID: \"146837b6-fb7d-4368-9ae3-bc4106ff72de\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-xhqsd" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.762463 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-xhqsd" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.771454 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.787741 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dffc4\" (UniqueName: \"kubernetes.io/projected/233d6836-c070-4d58-8f3d-6145a065240d-kube-api-access-dffc4\") pod \"controller-f8648f98b-bbxvm\" (UID: \"233d6836-c070-4d58-8f3d-6145a065240d\") " pod="metallb-system/controller-f8648f98b-bbxvm" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.787803 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/233d6836-c070-4d58-8f3d-6145a065240d-cert\") pod \"controller-f8648f98b-bbxvm\" (UID: \"233d6836-c070-4d58-8f3d-6145a065240d\") " pod="metallb-system/controller-f8648f98b-bbxvm" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.787853 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/233d6836-c070-4d58-8f3d-6145a065240d-metrics-certs\") pod \"controller-f8648f98b-bbxvm\" (UID: \"233d6836-c070-4d58-8f3d-6145a065240d\") " pod="metallb-system/controller-f8648f98b-bbxvm" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.791188 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/233d6836-c070-4d58-8f3d-6145a065240d-metrics-certs\") pod \"controller-f8648f98b-bbxvm\" (UID: \"233d6836-c070-4d58-8f3d-6145a065240d\") " pod="metallb-system/controller-f8648f98b-bbxvm" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.793954 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/233d6836-c070-4d58-8f3d-6145a065240d-cert\") pod \"controller-f8648f98b-bbxvm\" (UID: \"233d6836-c070-4d58-8f3d-6145a065240d\") " pod="metallb-system/controller-f8648f98b-bbxvm" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.818690 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dffc4\" (UniqueName: \"kubernetes.io/projected/233d6836-c070-4d58-8f3d-6145a065240d-kube-api-access-dffc4\") pod \"controller-f8648f98b-bbxvm\" (UID: \"233d6836-c070-4d58-8f3d-6145a065240d\") " pod="metallb-system/controller-f8648f98b-bbxvm" Dec 05 15:12:24 crc kubenswrapper[4840]: I1205 15:12:24.846904 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-bbxvm" Dec 05 15:12:25 crc kubenswrapper[4840]: I1205 15:12:25.044306 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-xhqsd"] Dec 05 15:12:25 crc kubenswrapper[4840]: I1205 15:12:25.113236 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-bbxvm"] Dec 05 15:12:25 crc kubenswrapper[4840]: W1205 15:12:25.119302 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod233d6836_c070_4d58_8f3d_6145a065240d.slice/crio-c26d11375a7c4546f3f1960c2e55596f695654708bad95a5a79b25d88ed7e5d7 WatchSource:0}: Error finding container c26d11375a7c4546f3f1960c2e55596f695654708bad95a5a79b25d88ed7e5d7: Status 404 returned error can't find the container with id c26d11375a7c4546f3f1960c2e55596f695654708bad95a5a79b25d88ed7e5d7 Dec 05 15:12:25 crc kubenswrapper[4840]: I1205 15:12:25.194214 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/475d466e-01af-4a8b-85c9-585a152bc376-metrics-certs\") pod \"speaker-8v7db\" (UID: \"475d466e-01af-4a8b-85c9-585a152bc376\") " pod="metallb-system/speaker-8v7db" Dec 05 15:12:25 crc kubenswrapper[4840]: I1205 15:12:25.194279 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/475d466e-01af-4a8b-85c9-585a152bc376-memberlist\") pod \"speaker-8v7db\" (UID: \"475d466e-01af-4a8b-85c9-585a152bc376\") " pod="metallb-system/speaker-8v7db" Dec 05 15:12:25 crc kubenswrapper[4840]: E1205 15:12:25.194423 4840 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 05 15:12:25 crc kubenswrapper[4840]: E1205 15:12:25.194482 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/475d466e-01af-4a8b-85c9-585a152bc376-memberlist podName:475d466e-01af-4a8b-85c9-585a152bc376 nodeName:}" failed. No retries permitted until 2025-12-05 15:12:26.194464028 +0000 UTC m=+824.535526642 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/475d466e-01af-4a8b-85c9-585a152bc376-memberlist") pod "speaker-8v7db" (UID: "475d466e-01af-4a8b-85c9-585a152bc376") : secret "metallb-memberlist" not found Dec 05 15:12:25 crc kubenswrapper[4840]: I1205 15:12:25.201530 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/475d466e-01af-4a8b-85c9-585a152bc376-metrics-certs\") pod \"speaker-8v7db\" (UID: \"475d466e-01af-4a8b-85c9-585a152bc376\") " pod="metallb-system/speaker-8v7db" Dec 05 15:12:25 crc kubenswrapper[4840]: I1205 15:12:25.416985 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-bbxvm" event={"ID":"233d6836-c070-4d58-8f3d-6145a065240d","Type":"ContainerStarted","Data":"edc87b7a00a9b87e9f140a9cc1f7f0d6bcc10268c962caf9879f1e36d5faa765"} Dec 05 15:12:25 crc kubenswrapper[4840]: I1205 15:12:25.417296 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-bbxvm" event={"ID":"233d6836-c070-4d58-8f3d-6145a065240d","Type":"ContainerStarted","Data":"2c28ce85c0b2d9100b85cb9b837e2a2ec5fe04448afefe810a1260b2e0df1e75"} Dec 05 15:12:25 crc kubenswrapper[4840]: I1205 15:12:25.417308 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-bbxvm" event={"ID":"233d6836-c070-4d58-8f3d-6145a065240d","Type":"ContainerStarted","Data":"c26d11375a7c4546f3f1960c2e55596f695654708bad95a5a79b25d88ed7e5d7"} Dec 05 15:12:25 crc kubenswrapper[4840]: I1205 15:12:25.418108 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-bbxvm" Dec 05 15:12:25 crc kubenswrapper[4840]: I1205 15:12:25.419027 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-xhqsd" event={"ID":"146837b6-fb7d-4368-9ae3-bc4106ff72de","Type":"ContainerStarted","Data":"23b8b1d21134d42cadc7fc416ede448acc46e8015e95c2418cde4dd36336baf6"} Dec 05 15:12:25 crc kubenswrapper[4840]: I1205 15:12:25.419998 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6vbng" event={"ID":"4c47f541-43bf-400b-bb9f-e3239ad0c636","Type":"ContainerStarted","Data":"9e3b4030e8c0f568a49a432cbbd0ae4d1025b476f70885c70e36efa172762292"} Dec 05 15:12:25 crc kubenswrapper[4840]: I1205 15:12:25.439417 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-bbxvm" podStartSLOduration=1.439388959 podStartE2EDuration="1.439388959s" podCreationTimestamp="2025-12-05 15:12:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:12:25.434964153 +0000 UTC m=+823.776026767" watchObservedRunningTime="2025-12-05 15:12:25.439388959 +0000 UTC m=+823.780451593" Dec 05 15:12:26 crc kubenswrapper[4840]: I1205 15:12:26.223475 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/475d466e-01af-4a8b-85c9-585a152bc376-memberlist\") pod \"speaker-8v7db\" (UID: \"475d466e-01af-4a8b-85c9-585a152bc376\") " pod="metallb-system/speaker-8v7db" Dec 05 15:12:26 crc kubenswrapper[4840]: I1205 15:12:26.233538 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/475d466e-01af-4a8b-85c9-585a152bc376-memberlist\") pod \"speaker-8v7db\" 
(UID: \"475d466e-01af-4a8b-85c9-585a152bc376\") " pod="metallb-system/speaker-8v7db" Dec 05 15:12:26 crc kubenswrapper[4840]: I1205 15:12:26.336853 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-8v7db" Dec 05 15:12:26 crc kubenswrapper[4840]: I1205 15:12:26.452710 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-8v7db" event={"ID":"475d466e-01af-4a8b-85c9-585a152bc376","Type":"ContainerStarted","Data":"ab4124f157263210a02d14fe69e33e529b1e0194d7cd4f3a55bec682c248815d"} Dec 05 15:12:27 crc kubenswrapper[4840]: I1205 15:12:27.461134 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-8v7db" event={"ID":"475d466e-01af-4a8b-85c9-585a152bc376","Type":"ContainerStarted","Data":"c8ee197b5d58e93f04e0f2f9483af835c950d593b7b2fa7599d583c32ecb8c4b"} Dec 05 15:12:27 crc kubenswrapper[4840]: I1205 15:12:27.461486 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-8v7db" event={"ID":"475d466e-01af-4a8b-85c9-585a152bc376","Type":"ContainerStarted","Data":"dfde97ba829cb1ef94acc5cec2304994bf540527a1e2c37858fd359657139768"} Dec 05 15:12:27 crc kubenswrapper[4840]: I1205 15:12:27.461520 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-8v7db" Dec 05 15:12:32 crc kubenswrapper[4840]: I1205 15:12:32.116549 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-8v7db" podStartSLOduration=8.116524068 podStartE2EDuration="8.116524068s" podCreationTimestamp="2025-12-05 15:12:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:12:27.478580849 +0000 UTC m=+825.819643463" watchObservedRunningTime="2025-12-05 15:12:32.116524068 +0000 UTC m=+830.457586682" Dec 05 15:12:34 crc kubenswrapper[4840]: I1205 15:12:34.511990 4840 generic.go:334] "Generic (PLEG): container finished" podID="4c47f541-43bf-400b-bb9f-e3239ad0c636" containerID="94fe37c3ee0fc1e06fb470dcb9d35ab34d8ffb258bc58ef753749c13e71619e3" exitCode=0 Dec 05 15:12:34 crc kubenswrapper[4840]: I1205 15:12:34.512052 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6vbng" event={"ID":"4c47f541-43bf-400b-bb9f-e3239ad0c636","Type":"ContainerDied","Data":"94fe37c3ee0fc1e06fb470dcb9d35ab34d8ffb258bc58ef753749c13e71619e3"} Dec 05 15:12:34 crc kubenswrapper[4840]: I1205 15:12:34.514854 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-xhqsd" event={"ID":"146837b6-fb7d-4368-9ae3-bc4106ff72de","Type":"ContainerStarted","Data":"be3cf6ad98b42c93c456b7ae447d7ea699defae98c1e067bf2aed0b2b2e8b42e"} Dec 05 15:12:34 crc kubenswrapper[4840]: I1205 15:12:34.515022 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-xhqsd" Dec 05 15:12:34 crc kubenswrapper[4840]: I1205 15:12:34.563909 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-xhqsd" podStartSLOduration=1.7969208220000001 podStartE2EDuration="10.563893004s" podCreationTimestamp="2025-12-05 15:12:24 +0000 UTC" firstStartedPulling="2025-12-05 15:12:25.050305318 +0000 UTC m=+823.391367932" lastFinishedPulling="2025-12-05 15:12:33.81727745 +0000 UTC m=+832.158340114" observedRunningTime="2025-12-05 15:12:34.563352448 +0000 UTC m=+832.904415062" 
watchObservedRunningTime="2025-12-05 15:12:34.563893004 +0000 UTC m=+832.904955618" Dec 05 15:12:35 crc kubenswrapper[4840]: I1205 15:12:35.521423 4840 generic.go:334] "Generic (PLEG): container finished" podID="4c47f541-43bf-400b-bb9f-e3239ad0c636" containerID="6efd8a6945e7fe6a8d51592561a05e71e16c530f6210fd6b6f80e35ec00dd3f5" exitCode=0 Dec 05 15:12:35 crc kubenswrapper[4840]: I1205 15:12:35.521462 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6vbng" event={"ID":"4c47f541-43bf-400b-bb9f-e3239ad0c636","Type":"ContainerDied","Data":"6efd8a6945e7fe6a8d51592561a05e71e16c530f6210fd6b6f80e35ec00dd3f5"} Dec 05 15:12:36 crc kubenswrapper[4840]: I1205 15:12:36.344022 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-8v7db" Dec 05 15:12:36 crc kubenswrapper[4840]: I1205 15:12:36.534294 4840 generic.go:334] "Generic (PLEG): container finished" podID="4c47f541-43bf-400b-bb9f-e3239ad0c636" containerID="21366eaf29aa53424a872a8437cafff32b9c593878a135594e82dc1e346943b2" exitCode=0 Dec 05 15:12:36 crc kubenswrapper[4840]: I1205 15:12:36.534367 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6vbng" event={"ID":"4c47f541-43bf-400b-bb9f-e3239ad0c636","Type":"ContainerDied","Data":"21366eaf29aa53424a872a8437cafff32b9c593878a135594e82dc1e346943b2"} Dec 05 15:12:37 crc kubenswrapper[4840]: I1205 15:12:37.543771 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6vbng" event={"ID":"4c47f541-43bf-400b-bb9f-e3239ad0c636","Type":"ContainerStarted","Data":"1fa04d08026caccf2582b7d6478f12144f64c3ea28bf6ffc74c77a553ac01040"} Dec 05 15:12:37 crc kubenswrapper[4840]: I1205 15:12:37.543816 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6vbng" event={"ID":"4c47f541-43bf-400b-bb9f-e3239ad0c636","Type":"ContainerStarted","Data":"4f72212d0039686db79c9964a00651aedefc8208e4073eb41bc6f6badd101676"} Dec 05 15:12:37 crc kubenswrapper[4840]: I1205 15:12:37.543831 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6vbng" event={"ID":"4c47f541-43bf-400b-bb9f-e3239ad0c636","Type":"ContainerStarted","Data":"efb5769657db40f8fdc252d0312d368ea69142126ce403c903b55905808a1afd"} Dec 05 15:12:37 crc kubenswrapper[4840]: I1205 15:12:37.543844 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6vbng" event={"ID":"4c47f541-43bf-400b-bb9f-e3239ad0c636","Type":"ContainerStarted","Data":"bf1eeaf22c510712c01e91e950125d93222f4e80280957dc72ececb1efbd71b7"} Dec 05 15:12:37 crc kubenswrapper[4840]: I1205 15:12:37.543856 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6vbng" event={"ID":"4c47f541-43bf-400b-bb9f-e3239ad0c636","Type":"ContainerStarted","Data":"48b858f7759713760ebcb27fe4b8d0cd88977a88ffc70a33702926aa3fc272b2"} Dec 05 15:12:38 crc kubenswrapper[4840]: I1205 15:12:38.554897 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-6vbng" event={"ID":"4c47f541-43bf-400b-bb9f-e3239ad0c636","Type":"ContainerStarted","Data":"422cf09b08ea6ef46e2ce51a5dd1d8c015f5c0251e996484504402f57056ddc6"} Dec 05 15:12:38 crc kubenswrapper[4840]: I1205 15:12:38.555387 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:38 crc kubenswrapper[4840]: I1205 15:12:38.583936 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-6vbng" 
podStartSLOduration=5.739673193 podStartE2EDuration="14.583919638s" podCreationTimestamp="2025-12-05 15:12:24 +0000 UTC" firstStartedPulling="2025-12-05 15:12:24.948876377 +0000 UTC m=+823.289938991" lastFinishedPulling="2025-12-05 15:12:33.793122822 +0000 UTC m=+832.134185436" observedRunningTime="2025-12-05 15:12:38.574570662 +0000 UTC m=+836.915633276" watchObservedRunningTime="2025-12-05 15:12:38.583919638 +0000 UTC m=+836.924982252" Dec 05 15:12:39 crc kubenswrapper[4840]: I1205 15:12:39.494390 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-wxndf"] Dec 05 15:12:39 crc kubenswrapper[4840]: I1205 15:12:39.495707 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-wxndf" Dec 05 15:12:39 crc kubenswrapper[4840]: I1205 15:12:39.501341 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-wxndf"] Dec 05 15:12:39 crc kubenswrapper[4840]: I1205 15:12:39.502095 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 05 15:12:39 crc kubenswrapper[4840]: I1205 15:12:39.502392 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 05 15:12:39 crc kubenswrapper[4840]: I1205 15:12:39.502492 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-9v9zn" Dec 05 15:12:39 crc kubenswrapper[4840]: I1205 15:12:39.565602 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgc8h\" (UniqueName: \"kubernetes.io/projected/2649d8dc-24b9-4ba7-a0eb-16b0ce237750-kube-api-access-dgc8h\") pod \"openstack-operator-index-wxndf\" (UID: \"2649d8dc-24b9-4ba7-a0eb-16b0ce237750\") " pod="openstack-operators/openstack-operator-index-wxndf" Dec 05 15:12:39 crc kubenswrapper[4840]: I1205 15:12:39.666483 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgc8h\" (UniqueName: \"kubernetes.io/projected/2649d8dc-24b9-4ba7-a0eb-16b0ce237750-kube-api-access-dgc8h\") pod \"openstack-operator-index-wxndf\" (UID: \"2649d8dc-24b9-4ba7-a0eb-16b0ce237750\") " pod="openstack-operators/openstack-operator-index-wxndf" Dec 05 15:12:39 crc kubenswrapper[4840]: I1205 15:12:39.690112 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgc8h\" (UniqueName: \"kubernetes.io/projected/2649d8dc-24b9-4ba7-a0eb-16b0ce237750-kube-api-access-dgc8h\") pod \"openstack-operator-index-wxndf\" (UID: \"2649d8dc-24b9-4ba7-a0eb-16b0ce237750\") " pod="openstack-operators/openstack-operator-index-wxndf" Dec 05 15:12:39 crc kubenswrapper[4840]: I1205 15:12:39.772048 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:39 crc kubenswrapper[4840]: I1205 15:12:39.809366 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:39 crc kubenswrapper[4840]: I1205 15:12:39.822379 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-wxndf" Dec 05 15:12:40 crc kubenswrapper[4840]: I1205 15:12:40.471442 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-wxndf"] Dec 05 15:12:40 crc kubenswrapper[4840]: I1205 15:12:40.566045 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-wxndf" event={"ID":"2649d8dc-24b9-4ba7-a0eb-16b0ce237750","Type":"ContainerStarted","Data":"df4b5ac6df7116fe32c4b2b1a93104421767f5059cf59e49faf1cd9a1e4dcfbd"} Dec 05 15:12:42 crc kubenswrapper[4840]: I1205 15:12:42.970571 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-wxndf"] Dec 05 15:12:43 crc kubenswrapper[4840]: I1205 15:12:43.476074 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-mvhfv"] Dec 05 15:12:43 crc kubenswrapper[4840]: I1205 15:12:43.476892 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-mvhfv" Dec 05 15:12:43 crc kubenswrapper[4840]: I1205 15:12:43.492980 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-mvhfv"] Dec 05 15:12:43 crc kubenswrapper[4840]: I1205 15:12:43.673540 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rntck\" (UniqueName: \"kubernetes.io/projected/e3672e49-5b56-4666-8f46-f7846e65b4ba-kube-api-access-rntck\") pod \"openstack-operator-index-mvhfv\" (UID: \"e3672e49-5b56-4666-8f46-f7846e65b4ba\") " pod="openstack-operators/openstack-operator-index-mvhfv" Dec 05 15:12:43 crc kubenswrapper[4840]: I1205 15:12:43.775400 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rntck\" (UniqueName: \"kubernetes.io/projected/e3672e49-5b56-4666-8f46-f7846e65b4ba-kube-api-access-rntck\") pod \"openstack-operator-index-mvhfv\" (UID: \"e3672e49-5b56-4666-8f46-f7846e65b4ba\") " pod="openstack-operators/openstack-operator-index-mvhfv" Dec 05 15:12:43 crc kubenswrapper[4840]: I1205 15:12:43.862076 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rntck\" (UniqueName: \"kubernetes.io/projected/e3672e49-5b56-4666-8f46-f7846e65b4ba-kube-api-access-rntck\") pod \"openstack-operator-index-mvhfv\" (UID: \"e3672e49-5b56-4666-8f46-f7846e65b4ba\") " pod="openstack-operators/openstack-operator-index-mvhfv" Dec 05 15:12:44 crc kubenswrapper[4840]: I1205 15:12:44.148653 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-mvhfv" Dec 05 15:12:44 crc kubenswrapper[4840]: I1205 15:12:44.604786 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-wxndf" event={"ID":"2649d8dc-24b9-4ba7-a0eb-16b0ce237750","Type":"ContainerStarted","Data":"c4d0d7b3d01beb333b25bbd2e9e21459eee897b9d9aa2426ad84d23d9fe58e34"} Dec 05 15:12:44 crc kubenswrapper[4840]: I1205 15:12:44.606244 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-wxndf" podUID="2649d8dc-24b9-4ba7-a0eb-16b0ce237750" containerName="registry-server" containerID="cri-o://c4d0d7b3d01beb333b25bbd2e9e21459eee897b9d9aa2426ad84d23d9fe58e34" gracePeriod=2 Dec 05 15:12:44 crc kubenswrapper[4840]: I1205 15:12:44.635765 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-mvhfv"] Dec 05 15:12:44 crc kubenswrapper[4840]: I1205 15:12:44.638993 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-wxndf" podStartSLOduration=2.690219647 podStartE2EDuration="5.638964835s" podCreationTimestamp="2025-12-05 15:12:39 +0000 UTC" firstStartedPulling="2025-12-05 15:12:40.486313958 +0000 UTC m=+838.827376572" lastFinishedPulling="2025-12-05 15:12:43.435059136 +0000 UTC m=+841.776121760" observedRunningTime="2025-12-05 15:12:44.629243927 +0000 UTC m=+842.970306581" watchObservedRunningTime="2025-12-05 15:12:44.638964835 +0000 UTC m=+842.980027479" Dec 05 15:12:44 crc kubenswrapper[4840]: W1205 15:12:44.644638 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode3672e49_5b56_4666_8f46_f7846e65b4ba.slice/crio-3b8b28db2f343ffc145c131f143655de0ed4e37ff170dd9238a14c438e793ad6 WatchSource:0}: Error finding container 3b8b28db2f343ffc145c131f143655de0ed4e37ff170dd9238a14c438e793ad6: Status 404 returned error can't find the container with id 3b8b28db2f343ffc145c131f143655de0ed4e37ff170dd9238a14c438e793ad6 Dec 05 15:12:44 crc kubenswrapper[4840]: I1205 15:12:44.797251 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-xhqsd" Dec 05 15:12:44 crc kubenswrapper[4840]: I1205 15:12:44.854997 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-bbxvm" Dec 05 15:12:45 crc kubenswrapper[4840]: I1205 15:12:45.613047 4840 generic.go:334] "Generic (PLEG): container finished" podID="2649d8dc-24b9-4ba7-a0eb-16b0ce237750" containerID="c4d0d7b3d01beb333b25bbd2e9e21459eee897b9d9aa2426ad84d23d9fe58e34" exitCode=0 Dec 05 15:12:45 crc kubenswrapper[4840]: I1205 15:12:45.613149 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-wxndf" event={"ID":"2649d8dc-24b9-4ba7-a0eb-16b0ce237750","Type":"ContainerDied","Data":"c4d0d7b3d01beb333b25bbd2e9e21459eee897b9d9aa2426ad84d23d9fe58e34"} Dec 05 15:12:45 crc kubenswrapper[4840]: I1205 15:12:45.613429 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-wxndf" event={"ID":"2649d8dc-24b9-4ba7-a0eb-16b0ce237750","Type":"ContainerDied","Data":"df4b5ac6df7116fe32c4b2b1a93104421767f5059cf59e49faf1cd9a1e4dcfbd"} Dec 05 15:12:45 crc kubenswrapper[4840]: I1205 15:12:45.613448 4840 pod_container_deletor.go:80] "Container not found in pod's 
containers" containerID="df4b5ac6df7116fe32c4b2b1a93104421767f5059cf59e49faf1cd9a1e4dcfbd" Dec 05 15:12:45 crc kubenswrapper[4840]: I1205 15:12:45.615259 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-mvhfv" event={"ID":"e3672e49-5b56-4666-8f46-f7846e65b4ba","Type":"ContainerStarted","Data":"2d45c2860cccd8abdb0dd00988cce74d101688a34249b0e2bedcb3966b0e428a"} Dec 05 15:12:45 crc kubenswrapper[4840]: I1205 15:12:45.615283 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-mvhfv" event={"ID":"e3672e49-5b56-4666-8f46-f7846e65b4ba","Type":"ContainerStarted","Data":"3b8b28db2f343ffc145c131f143655de0ed4e37ff170dd9238a14c438e793ad6"} Dec 05 15:12:45 crc kubenswrapper[4840]: I1205 15:12:45.631725 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-wxndf" Dec 05 15:12:45 crc kubenswrapper[4840]: I1205 15:12:45.640187 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-mvhfv" podStartSLOduration=2.415373996 podStartE2EDuration="2.640165834s" podCreationTimestamp="2025-12-05 15:12:43 +0000 UTC" firstStartedPulling="2025-12-05 15:12:44.652443099 +0000 UTC m=+842.993505753" lastFinishedPulling="2025-12-05 15:12:44.877234977 +0000 UTC m=+843.218297591" observedRunningTime="2025-12-05 15:12:45.629599183 +0000 UTC m=+843.970661837" watchObservedRunningTime="2025-12-05 15:12:45.640165834 +0000 UTC m=+843.981228468" Dec 05 15:12:45 crc kubenswrapper[4840]: I1205 15:12:45.806464 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgc8h\" (UniqueName: \"kubernetes.io/projected/2649d8dc-24b9-4ba7-a0eb-16b0ce237750-kube-api-access-dgc8h\") pod \"2649d8dc-24b9-4ba7-a0eb-16b0ce237750\" (UID: \"2649d8dc-24b9-4ba7-a0eb-16b0ce237750\") " Dec 05 15:12:45 crc kubenswrapper[4840]: I1205 15:12:45.815986 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2649d8dc-24b9-4ba7-a0eb-16b0ce237750-kube-api-access-dgc8h" (OuterVolumeSpecName: "kube-api-access-dgc8h") pod "2649d8dc-24b9-4ba7-a0eb-16b0ce237750" (UID: "2649d8dc-24b9-4ba7-a0eb-16b0ce237750"). InnerVolumeSpecName "kube-api-access-dgc8h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:12:45 crc kubenswrapper[4840]: I1205 15:12:45.908445 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dgc8h\" (UniqueName: \"kubernetes.io/projected/2649d8dc-24b9-4ba7-a0eb-16b0ce237750-kube-api-access-dgc8h\") on node \"crc\" DevicePath \"\"" Dec 05 15:12:46 crc kubenswrapper[4840]: I1205 15:12:46.624584 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-wxndf" Dec 05 15:12:46 crc kubenswrapper[4840]: I1205 15:12:46.653851 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-wxndf"] Dec 05 15:12:46 crc kubenswrapper[4840]: I1205 15:12:46.662622 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-wxndf"] Dec 05 15:12:48 crc kubenswrapper[4840]: I1205 15:12:48.078912 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2649d8dc-24b9-4ba7-a0eb-16b0ce237750" path="/var/lib/kubelet/pods/2649d8dc-24b9-4ba7-a0eb-16b0ce237750/volumes" Dec 05 15:12:54 crc kubenswrapper[4840]: I1205 15:12:54.148904 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-mvhfv" Dec 05 15:12:54 crc kubenswrapper[4840]: I1205 15:12:54.149235 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-mvhfv" Dec 05 15:12:54 crc kubenswrapper[4840]: I1205 15:12:54.188090 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-mvhfv" Dec 05 15:12:54 crc kubenswrapper[4840]: I1205 15:12:54.725985 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-mvhfv" Dec 05 15:12:54 crc kubenswrapper[4840]: I1205 15:12:54.798608 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-6vbng" Dec 05 15:12:55 crc kubenswrapper[4840]: I1205 15:12:55.743328 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf"] Dec 05 15:12:55 crc kubenswrapper[4840]: E1205 15:12:55.743653 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2649d8dc-24b9-4ba7-a0eb-16b0ce237750" containerName="registry-server" Dec 05 15:12:55 crc kubenswrapper[4840]: I1205 15:12:55.743669 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="2649d8dc-24b9-4ba7-a0eb-16b0ce237750" containerName="registry-server" Dec 05 15:12:55 crc kubenswrapper[4840]: I1205 15:12:55.743812 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="2649d8dc-24b9-4ba7-a0eb-16b0ce237750" containerName="registry-server" Dec 05 15:12:55 crc kubenswrapper[4840]: I1205 15:12:55.744986 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf" Dec 05 15:12:55 crc kubenswrapper[4840]: I1205 15:12:55.748876 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-x9vq9" Dec 05 15:12:55 crc kubenswrapper[4840]: I1205 15:12:55.752378 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf"] Dec 05 15:12:55 crc kubenswrapper[4840]: I1205 15:12:55.852085 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7a20aa55-efd0-473a-9582-e7812dd599ab-bundle\") pod \"8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf\" (UID: \"7a20aa55-efd0-473a-9582-e7812dd599ab\") " pod="openstack-operators/8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf" Dec 05 15:12:55 crc kubenswrapper[4840]: I1205 15:12:55.852182 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7a20aa55-efd0-473a-9582-e7812dd599ab-util\") pod \"8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf\" (UID: \"7a20aa55-efd0-473a-9582-e7812dd599ab\") " pod="openstack-operators/8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf" Dec 05 15:12:55 crc kubenswrapper[4840]: I1205 15:12:55.852406 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xbrz\" (UniqueName: \"kubernetes.io/projected/7a20aa55-efd0-473a-9582-e7812dd599ab-kube-api-access-7xbrz\") pod \"8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf\" (UID: \"7a20aa55-efd0-473a-9582-e7812dd599ab\") " pod="openstack-operators/8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf" Dec 05 15:12:55 crc kubenswrapper[4840]: I1205 15:12:55.954747 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7a20aa55-efd0-473a-9582-e7812dd599ab-bundle\") pod \"8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf\" (UID: \"7a20aa55-efd0-473a-9582-e7812dd599ab\") " pod="openstack-operators/8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf" Dec 05 15:12:55 crc kubenswrapper[4840]: I1205 15:12:55.955262 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7a20aa55-efd0-473a-9582-e7812dd599ab-util\") pod \"8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf\" (UID: \"7a20aa55-efd0-473a-9582-e7812dd599ab\") " pod="openstack-operators/8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf" Dec 05 15:12:55 crc kubenswrapper[4840]: I1205 15:12:55.955616 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xbrz\" (UniqueName: \"kubernetes.io/projected/7a20aa55-efd0-473a-9582-e7812dd599ab-kube-api-access-7xbrz\") pod \"8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf\" (UID: \"7a20aa55-efd0-473a-9582-e7812dd599ab\") " pod="openstack-operators/8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf" Dec 05 15:12:55 crc kubenswrapper[4840]: I1205 15:12:55.955813 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/7a20aa55-efd0-473a-9582-e7812dd599ab-util\") pod \"8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf\" (UID: \"7a20aa55-efd0-473a-9582-e7812dd599ab\") " pod="openstack-operators/8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf" Dec 05 15:12:55 crc kubenswrapper[4840]: I1205 15:12:55.956145 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7a20aa55-efd0-473a-9582-e7812dd599ab-bundle\") pod \"8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf\" (UID: \"7a20aa55-efd0-473a-9582-e7812dd599ab\") " pod="openstack-operators/8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf" Dec 05 15:12:55 crc kubenswrapper[4840]: I1205 15:12:55.995217 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xbrz\" (UniqueName: \"kubernetes.io/projected/7a20aa55-efd0-473a-9582-e7812dd599ab-kube-api-access-7xbrz\") pod \"8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf\" (UID: \"7a20aa55-efd0-473a-9582-e7812dd599ab\") " pod="openstack-operators/8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf" Dec 05 15:12:56 crc kubenswrapper[4840]: I1205 15:12:56.064649 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf" Dec 05 15:12:56 crc kubenswrapper[4840]: I1205 15:12:56.520262 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf"] Dec 05 15:12:56 crc kubenswrapper[4840]: I1205 15:12:56.709790 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf" event={"ID":"7a20aa55-efd0-473a-9582-e7812dd599ab","Type":"ContainerStarted","Data":"3b6225d1186be29fad55733478959758551f6f7ee13eb766c662881a557c81b1"} Dec 05 15:12:56 crc kubenswrapper[4840]: I1205 15:12:56.710097 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf" event={"ID":"7a20aa55-efd0-473a-9582-e7812dd599ab","Type":"ContainerStarted","Data":"cc71eb6812f263ed87852e2a8182b3f0634745a251f401618b02d0fb89df7252"} Dec 05 15:12:57 crc kubenswrapper[4840]: I1205 15:12:57.720631 4840 generic.go:334] "Generic (PLEG): container finished" podID="7a20aa55-efd0-473a-9582-e7812dd599ab" containerID="3b6225d1186be29fad55733478959758551f6f7ee13eb766c662881a557c81b1" exitCode=0 Dec 05 15:12:57 crc kubenswrapper[4840]: I1205 15:12:57.720724 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf" event={"ID":"7a20aa55-efd0-473a-9582-e7812dd599ab","Type":"ContainerDied","Data":"3b6225d1186be29fad55733478959758551f6f7ee13eb766c662881a557c81b1"} Dec 05 15:12:58 crc kubenswrapper[4840]: I1205 15:12:58.729597 4840 generic.go:334] "Generic (PLEG): container finished" podID="7a20aa55-efd0-473a-9582-e7812dd599ab" containerID="32944332c7964719c53c08186380c1d2f349278203e419cb5edcebf7fe2dea86" exitCode=0 Dec 05 15:12:58 crc kubenswrapper[4840]: I1205 15:12:58.729691 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf" 
event={"ID":"7a20aa55-efd0-473a-9582-e7812dd599ab","Type":"ContainerDied","Data":"32944332c7964719c53c08186380c1d2f349278203e419cb5edcebf7fe2dea86"} Dec 05 15:12:59 crc kubenswrapper[4840]: I1205 15:12:59.741035 4840 generic.go:334] "Generic (PLEG): container finished" podID="7a20aa55-efd0-473a-9582-e7812dd599ab" containerID="3564d72c33571804c450fa6594b20491a2f0e6241a3ccee4624b4344a13d0d8b" exitCode=0 Dec 05 15:12:59 crc kubenswrapper[4840]: I1205 15:12:59.741108 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf" event={"ID":"7a20aa55-efd0-473a-9582-e7812dd599ab","Type":"ContainerDied","Data":"3564d72c33571804c450fa6594b20491a2f0e6241a3ccee4624b4344a13d0d8b"} Dec 05 15:13:00 crc kubenswrapper[4840]: I1205 15:13:00.979755 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf" Dec 05 15:13:01 crc kubenswrapper[4840]: I1205 15:13:01.125812 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7a20aa55-efd0-473a-9582-e7812dd599ab-util\") pod \"7a20aa55-efd0-473a-9582-e7812dd599ab\" (UID: \"7a20aa55-efd0-473a-9582-e7812dd599ab\") " Dec 05 15:13:01 crc kubenswrapper[4840]: I1205 15:13:01.125935 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7xbrz\" (UniqueName: \"kubernetes.io/projected/7a20aa55-efd0-473a-9582-e7812dd599ab-kube-api-access-7xbrz\") pod \"7a20aa55-efd0-473a-9582-e7812dd599ab\" (UID: \"7a20aa55-efd0-473a-9582-e7812dd599ab\") " Dec 05 15:13:01 crc kubenswrapper[4840]: I1205 15:13:01.125999 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7a20aa55-efd0-473a-9582-e7812dd599ab-bundle\") pod \"7a20aa55-efd0-473a-9582-e7812dd599ab\" (UID: \"7a20aa55-efd0-473a-9582-e7812dd599ab\") " Dec 05 15:13:01 crc kubenswrapper[4840]: I1205 15:13:01.126723 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a20aa55-efd0-473a-9582-e7812dd599ab-bundle" (OuterVolumeSpecName: "bundle") pod "7a20aa55-efd0-473a-9582-e7812dd599ab" (UID: "7a20aa55-efd0-473a-9582-e7812dd599ab"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:13:01 crc kubenswrapper[4840]: I1205 15:13:01.126942 4840 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7a20aa55-efd0-473a-9582-e7812dd599ab-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:13:01 crc kubenswrapper[4840]: I1205 15:13:01.138317 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a20aa55-efd0-473a-9582-e7812dd599ab-kube-api-access-7xbrz" (OuterVolumeSpecName: "kube-api-access-7xbrz") pod "7a20aa55-efd0-473a-9582-e7812dd599ab" (UID: "7a20aa55-efd0-473a-9582-e7812dd599ab"). InnerVolumeSpecName "kube-api-access-7xbrz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:13:01 crc kubenswrapper[4840]: I1205 15:13:01.139976 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a20aa55-efd0-473a-9582-e7812dd599ab-util" (OuterVolumeSpecName: "util") pod "7a20aa55-efd0-473a-9582-e7812dd599ab" (UID: "7a20aa55-efd0-473a-9582-e7812dd599ab"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:13:01 crc kubenswrapper[4840]: I1205 15:13:01.228472 4840 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7a20aa55-efd0-473a-9582-e7812dd599ab-util\") on node \"crc\" DevicePath \"\"" Dec 05 15:13:01 crc kubenswrapper[4840]: I1205 15:13:01.228966 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7xbrz\" (UniqueName: \"kubernetes.io/projected/7a20aa55-efd0-473a-9582-e7812dd599ab-kube-api-access-7xbrz\") on node \"crc\" DevicePath \"\"" Dec 05 15:13:01 crc kubenswrapper[4840]: I1205 15:13:01.759793 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf" event={"ID":"7a20aa55-efd0-473a-9582-e7812dd599ab","Type":"ContainerDied","Data":"cc71eb6812f263ed87852e2a8182b3f0634745a251f401618b02d0fb89df7252"} Dec 05 15:13:01 crc kubenswrapper[4840]: I1205 15:13:01.759914 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc71eb6812f263ed87852e2a8182b3f0634745a251f401618b02d0fb89df7252" Dec 05 15:13:01 crc kubenswrapper[4840]: I1205 15:13:01.759971 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf" Dec 05 15:13:03 crc kubenswrapper[4840]: I1205 15:13:03.622164 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7f5df65589-88fmq"] Dec 05 15:13:03 crc kubenswrapper[4840]: E1205 15:13:03.622779 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a20aa55-efd0-473a-9582-e7812dd599ab" containerName="util" Dec 05 15:13:03 crc kubenswrapper[4840]: I1205 15:13:03.622795 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a20aa55-efd0-473a-9582-e7812dd599ab" containerName="util" Dec 05 15:13:03 crc kubenswrapper[4840]: E1205 15:13:03.622814 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a20aa55-efd0-473a-9582-e7812dd599ab" containerName="pull" Dec 05 15:13:03 crc kubenswrapper[4840]: I1205 15:13:03.622822 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a20aa55-efd0-473a-9582-e7812dd599ab" containerName="pull" Dec 05 15:13:03 crc kubenswrapper[4840]: E1205 15:13:03.622837 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a20aa55-efd0-473a-9582-e7812dd599ab" containerName="extract" Dec 05 15:13:03 crc kubenswrapper[4840]: I1205 15:13:03.622845 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a20aa55-efd0-473a-9582-e7812dd599ab" containerName="extract" Dec 05 15:13:03 crc kubenswrapper[4840]: I1205 15:13:03.622999 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a20aa55-efd0-473a-9582-e7812dd599ab" containerName="extract" Dec 05 15:13:03 crc kubenswrapper[4840]: I1205 15:13:03.623545 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7f5df65589-88fmq" Dec 05 15:13:03 crc kubenswrapper[4840]: I1205 15:13:03.625647 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-qhksg" Dec 05 15:13:03 crc kubenswrapper[4840]: I1205 15:13:03.642226 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7f5df65589-88fmq"] Dec 05 15:13:03 crc kubenswrapper[4840]: I1205 15:13:03.660775 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9z6w\" (UniqueName: \"kubernetes.io/projected/6251ea61-296a-4fe0-b2a2-c6de82a74d33-kube-api-access-m9z6w\") pod \"openstack-operator-controller-operator-7f5df65589-88fmq\" (UID: \"6251ea61-296a-4fe0-b2a2-c6de82a74d33\") " pod="openstack-operators/openstack-operator-controller-operator-7f5df65589-88fmq" Dec 05 15:13:03 crc kubenswrapper[4840]: I1205 15:13:03.761929 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9z6w\" (UniqueName: \"kubernetes.io/projected/6251ea61-296a-4fe0-b2a2-c6de82a74d33-kube-api-access-m9z6w\") pod \"openstack-operator-controller-operator-7f5df65589-88fmq\" (UID: \"6251ea61-296a-4fe0-b2a2-c6de82a74d33\") " pod="openstack-operators/openstack-operator-controller-operator-7f5df65589-88fmq" Dec 05 15:13:03 crc kubenswrapper[4840]: I1205 15:13:03.783206 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9z6w\" (UniqueName: \"kubernetes.io/projected/6251ea61-296a-4fe0-b2a2-c6de82a74d33-kube-api-access-m9z6w\") pod \"openstack-operator-controller-operator-7f5df65589-88fmq\" (UID: \"6251ea61-296a-4fe0-b2a2-c6de82a74d33\") " pod="openstack-operators/openstack-operator-controller-operator-7f5df65589-88fmq" Dec 05 15:13:03 crc kubenswrapper[4840]: I1205 15:13:03.942958 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7f5df65589-88fmq" Dec 05 15:13:04 crc kubenswrapper[4840]: I1205 15:13:04.410682 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7f5df65589-88fmq"] Dec 05 15:13:04 crc kubenswrapper[4840]: I1205 15:13:04.784132 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7f5df65589-88fmq" event={"ID":"6251ea61-296a-4fe0-b2a2-c6de82a74d33","Type":"ContainerStarted","Data":"2b4870644f33d08697db64fa46123d4cee234ddb33cabe0207a25190bdf935c2"} Dec 05 15:13:10 crc kubenswrapper[4840]: I1205 15:13:10.980191 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7f5df65589-88fmq" event={"ID":"6251ea61-296a-4fe0-b2a2-c6de82a74d33","Type":"ContainerStarted","Data":"5b64e6b882286c827e4fcd60efe92dc8fed9e070019df75557a5c5db79c29278"} Dec 05 15:13:10 crc kubenswrapper[4840]: I1205 15:13:10.980728 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-7f5df65589-88fmq" Dec 05 15:13:11 crc kubenswrapper[4840]: I1205 15:13:11.009426 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-7f5df65589-88fmq" podStartSLOduration=1.9364981060000002 podStartE2EDuration="8.009400285s" podCreationTimestamp="2025-12-05 15:13:03 +0000 UTC" firstStartedPulling="2025-12-05 15:13:04.419582578 +0000 UTC m=+862.760645192" lastFinishedPulling="2025-12-05 15:13:10.492484757 +0000 UTC m=+868.833547371" observedRunningTime="2025-12-05 15:13:11.001733888 +0000 UTC m=+869.342796502" watchObservedRunningTime="2025-12-05 15:13:11.009400285 +0000 UTC m=+869.350462929" Dec 05 15:13:23 crc kubenswrapper[4840]: I1205 15:13:23.946536 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-7f5df65589-88fmq" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.383987 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-7zmt7"] Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.385907 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7zmt7" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.388392 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-kglr4" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.391011 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-dcbzq"] Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.392188 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-dcbzq" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.392416 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqsfl\" (UniqueName: \"kubernetes.io/projected/c79f46e0-5947-4b4a-b581-0e49736fb41f-kube-api-access-lqsfl\") pod \"barbican-operator-controller-manager-7d9dfd778-7zmt7\" (UID: \"c79f46e0-5947-4b4a-b581-0e49736fb41f\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7zmt7" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.399348 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-6mzvz" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.416635 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-8vw7g"] Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.417746 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-8vw7g" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.420125 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-2tq97" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.423469 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-dcbzq"] Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.428684 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-fwvd2"] Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.429913 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-fwvd2" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.440919 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-7v4cw" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.473234 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-fwvd2"] Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.481941 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-8vw7g"] Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.492333 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-7zmt7"] Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.493502 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqsfl\" (UniqueName: \"kubernetes.io/projected/c79f46e0-5947-4b4a-b581-0e49736fb41f-kube-api-access-lqsfl\") pod \"barbican-operator-controller-manager-7d9dfd778-7zmt7\" (UID: \"c79f46e0-5947-4b4a-b581-0e49736fb41f\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7zmt7" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.493570 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdvdh\" (UniqueName: \"kubernetes.io/projected/9f897b64-3aeb-44c6-a340-9e0082876e93-kube-api-access-vdvdh\") pod \"designate-operator-controller-manager-78b4bc895b-8vw7g\" (UID: \"9f897b64-3aeb-44c6-a340-9e0082876e93\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-8vw7g" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.493613 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfcp6\" (UniqueName: \"kubernetes.io/projected/c15ac393-953d-45e9-b8dc-7212c6e2366b-kube-api-access-wfcp6\") pod \"glance-operator-controller-manager-77987cd8cd-fwvd2\" (UID: \"c15ac393-953d-45e9-b8dc-7212c6e2366b\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-fwvd2" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.493644 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4csdn\" (UniqueName: \"kubernetes.io/projected/94defa3c-b83c-44b9-83c0-e92bdf7944be-kube-api-access-4csdn\") pod \"cinder-operator-controller-manager-859b6ccc6-dcbzq\" (UID: \"94defa3c-b83c-44b9-83c0-e92bdf7944be\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-dcbzq" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.517902 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-qz4dx"] Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.518818 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-qz4dx" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.526571 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-jzpxx" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.527339 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqsfl\" (UniqueName: \"kubernetes.io/projected/c79f46e0-5947-4b4a-b581-0e49736fb41f-kube-api-access-lqsfl\") pod \"barbican-operator-controller-manager-7d9dfd778-7zmt7\" (UID: \"c79f46e0-5947-4b4a-b581-0e49736fb41f\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7zmt7" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.530954 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fmbzp"] Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.532137 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fmbzp" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.545949 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-p6mhj" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.561101 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz"] Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.562237 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.567047 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.577251 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-729zp" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.588030 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fmbzp"] Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.594447 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdvdh\" (UniqueName: \"kubernetes.io/projected/9f897b64-3aeb-44c6-a340-9e0082876e93-kube-api-access-vdvdh\") pod \"designate-operator-controller-manager-78b4bc895b-8vw7g\" (UID: \"9f897b64-3aeb-44c6-a340-9e0082876e93\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-8vw7g" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.594520 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqbxs\" (UniqueName: \"kubernetes.io/projected/ff2da961-d2c6-486f-87bf-2394ee00a5a1-kube-api-access-sqbxs\") pod \"horizon-operator-controller-manager-68c6d99b8f-fmbzp\" (UID: \"ff2da961-d2c6-486f-87bf-2394ee00a5a1\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fmbzp" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.594547 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wfcp6\" (UniqueName: 
\"kubernetes.io/projected/c15ac393-953d-45e9-b8dc-7212c6e2366b-kube-api-access-wfcp6\") pod \"glance-operator-controller-manager-77987cd8cd-fwvd2\" (UID: \"c15ac393-953d-45e9-b8dc-7212c6e2366b\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-fwvd2" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.594573 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhzjh\" (UniqueName: \"kubernetes.io/projected/8e7074a0-bae6-49e7-8915-c4cb3242108d-kube-api-access-bhzjh\") pod \"infra-operator-controller-manager-57548d458d-gpcmz\" (UID: \"8e7074a0-bae6-49e7-8915-c4cb3242108d\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.594603 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4csdn\" (UniqueName: \"kubernetes.io/projected/94defa3c-b83c-44b9-83c0-e92bdf7944be-kube-api-access-4csdn\") pod \"cinder-operator-controller-manager-859b6ccc6-dcbzq\" (UID: \"94defa3c-b83c-44b9-83c0-e92bdf7944be\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-dcbzq" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.594630 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncmwh\" (UniqueName: \"kubernetes.io/projected/546f9401-ad92-49f1-836a-8e240bbc2d61-kube-api-access-ncmwh\") pod \"heat-operator-controller-manager-5f64f6f8bb-qz4dx\" (UID: \"546f9401-ad92-49f1-836a-8e240bbc2d61\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-qz4dx" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.594652 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8e7074a0-bae6-49e7-8915-c4cb3242108d-cert\") pod \"infra-operator-controller-manager-57548d458d-gpcmz\" (UID: \"8e7074a0-bae6-49e7-8915-c4cb3242108d\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.600261 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-qz4dx"] Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.625835 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz"] Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.627579 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4csdn\" (UniqueName: \"kubernetes.io/projected/94defa3c-b83c-44b9-83c0-e92bdf7944be-kube-api-access-4csdn\") pod \"cinder-operator-controller-manager-859b6ccc6-dcbzq\" (UID: \"94defa3c-b83c-44b9-83c0-e92bdf7944be\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-dcbzq" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.627958 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wfcp6\" (UniqueName: \"kubernetes.io/projected/c15ac393-953d-45e9-b8dc-7212c6e2366b-kube-api-access-wfcp6\") pod \"glance-operator-controller-manager-77987cd8cd-fwvd2\" (UID: \"c15ac393-953d-45e9-b8dc-7212c6e2366b\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-fwvd2" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.632260 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-vdvdh\" (UniqueName: \"kubernetes.io/projected/9f897b64-3aeb-44c6-a340-9e0082876e93-kube-api-access-vdvdh\") pod \"designate-operator-controller-manager-78b4bc895b-8vw7g\" (UID: \"9f897b64-3aeb-44c6-a340-9e0082876e93\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-8vw7g" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.640998 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-q2pp7"] Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.641961 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-q2pp7" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.647393 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-9mjxx" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.668180 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-q2pp7"] Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.688918 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-n6chf"] Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.690092 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-n6chf" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.700231 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-cjc7g" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.700440 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncmwh\" (UniqueName: \"kubernetes.io/projected/546f9401-ad92-49f1-836a-8e240bbc2d61-kube-api-access-ncmwh\") pod \"heat-operator-controller-manager-5f64f6f8bb-qz4dx\" (UID: \"546f9401-ad92-49f1-836a-8e240bbc2d61\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-qz4dx" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.700482 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8e7074a0-bae6-49e7-8915-c4cb3242108d-cert\") pod \"infra-operator-controller-manager-57548d458d-gpcmz\" (UID: \"8e7074a0-bae6-49e7-8915-c4cb3242108d\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.700560 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqbxs\" (UniqueName: \"kubernetes.io/projected/ff2da961-d2c6-486f-87bf-2394ee00a5a1-kube-api-access-sqbxs\") pod \"horizon-operator-controller-manager-68c6d99b8f-fmbzp\" (UID: \"ff2da961-d2c6-486f-87bf-2394ee00a5a1\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fmbzp" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.700581 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhzjh\" (UniqueName: \"kubernetes.io/projected/8e7074a0-bae6-49e7-8915-c4cb3242108d-kube-api-access-bhzjh\") pod \"infra-operator-controller-manager-57548d458d-gpcmz\" (UID: \"8e7074a0-bae6-49e7-8915-c4cb3242108d\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz" Dec 05 
15:13:43 crc kubenswrapper[4840]: E1205 15:13:43.700941 4840 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 15:13:43 crc kubenswrapper[4840]: E1205 15:13:43.700982 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8e7074a0-bae6-49e7-8915-c4cb3242108d-cert podName:8e7074a0-bae6-49e7-8915-c4cb3242108d nodeName:}" failed. No retries permitted until 2025-12-05 15:13:44.200966888 +0000 UTC m=+902.542029502 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/8e7074a0-bae6-49e7-8915-c4cb3242108d-cert") pod "infra-operator-controller-manager-57548d458d-gpcmz" (UID: "8e7074a0-bae6-49e7-8915-c4cb3242108d") : secret "infra-operator-webhook-server-cert" not found Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.706434 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7zmt7" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.711396 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-zjbwl"] Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.712469 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-zjbwl" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.715210 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-kf5h9" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.726441 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-dcbzq" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.738764 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-8vw7g" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.747373 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-n6chf"] Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.752279 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-fwvd2" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.752374 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqbxs\" (UniqueName: \"kubernetes.io/projected/ff2da961-d2c6-486f-87bf-2394ee00a5a1-kube-api-access-sqbxs\") pod \"horizon-operator-controller-manager-68c6d99b8f-fmbzp\" (UID: \"ff2da961-d2c6-486f-87bf-2394ee00a5a1\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fmbzp" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.753819 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncmwh\" (UniqueName: \"kubernetes.io/projected/546f9401-ad92-49f1-836a-8e240bbc2d61-kube-api-access-ncmwh\") pod \"heat-operator-controller-manager-5f64f6f8bb-qz4dx\" (UID: \"546f9401-ad92-49f1-836a-8e240bbc2d61\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-qz4dx" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.770633 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-zjbwl"] Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.772642 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhzjh\" (UniqueName: \"kubernetes.io/projected/8e7074a0-bae6-49e7-8915-c4cb3242108d-kube-api-access-bhzjh\") pod \"infra-operator-controller-manager-57548d458d-gpcmz\" (UID: \"8e7074a0-bae6-49e7-8915-c4cb3242108d\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.843959 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-5v888"] Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.870076 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vh6rh\" (UniqueName: \"kubernetes.io/projected/683c5938-459a-4c60-bb98-8237f6ddc4f6-kube-api-access-vh6rh\") pod \"manila-operator-controller-manager-7c79b5df47-n6chf\" (UID: \"683c5938-459a-4c60-bb98-8237f6ddc4f6\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-n6chf" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.870139 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6h9hz\" (UniqueName: \"kubernetes.io/projected/48a2075e-30bb-41fb-a311-fb9b593182c6-kube-api-access-6h9hz\") pod \"ironic-operator-controller-manager-6c548fd776-q2pp7\" (UID: \"48a2075e-30bb-41fb-a311-fb9b593182c6\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-q2pp7" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.870202 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qbp2\" (UniqueName: \"kubernetes.io/projected/6edb6d08-9885-457f-8642-ef77c64de97a-kube-api-access-4qbp2\") pod \"keystone-operator-controller-manager-7765d96ddf-zjbwl\" (UID: \"6edb6d08-9885-457f-8642-ef77c64de97a\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-zjbwl" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.898139 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-5v888" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.899120 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fmbzp" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.899677 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-qz4dx" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.907470 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-fdwhs" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.908354 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-hvc5g"] Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.910116 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-hvc5g" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.912586 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-brvh5" Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.967293 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-spqrr"] Dec 05 15:13:43 crc kubenswrapper[4840]: I1205 15:13:43.968794 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-spqrr" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.010896 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frcbg\" (UniqueName: \"kubernetes.io/projected/3b263984-5a9b-45eb-886b-b8209ada6a7a-kube-api-access-frcbg\") pod \"mariadb-operator-controller-manager-56bbcc9d85-hvc5g\" (UID: \"3b263984-5a9b-45eb-886b-b8209ada6a7a\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-hvc5g" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.010949 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cpgxf\" (UniqueName: \"kubernetes.io/projected/ae41b596-75b2-46ab-b95a-ef7b41f1e66b-kube-api-access-cpgxf\") pod \"nova-operator-controller-manager-697bc559fc-spqrr\" (UID: \"ae41b596-75b2-46ab-b95a-ef7b41f1e66b\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-spqrr" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.011029 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vh6rh\" (UniqueName: \"kubernetes.io/projected/683c5938-459a-4c60-bb98-8237f6ddc4f6-kube-api-access-vh6rh\") pod \"manila-operator-controller-manager-7c79b5df47-n6chf\" (UID: \"683c5938-459a-4c60-bb98-8237f6ddc4f6\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-n6chf" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.011070 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6h9hz\" (UniqueName: \"kubernetes.io/projected/48a2075e-30bb-41fb-a311-fb9b593182c6-kube-api-access-6h9hz\") pod \"ironic-operator-controller-manager-6c548fd776-q2pp7\" (UID: 
\"48a2075e-30bb-41fb-a311-fb9b593182c6\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-q2pp7" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.011133 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qbp2\" (UniqueName: \"kubernetes.io/projected/6edb6d08-9885-457f-8642-ef77c64de97a-kube-api-access-4qbp2\") pod \"keystone-operator-controller-manager-7765d96ddf-zjbwl\" (UID: \"6edb6d08-9885-457f-8642-ef77c64de97a\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-zjbwl" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.011170 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqpkd\" (UniqueName: \"kubernetes.io/projected/cdf9b744-368d-4c91-8ecf-6a5d983f3eb7-kube-api-access-xqpkd\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-5v888\" (UID: \"cdf9b744-368d-4c91-8ecf-6a5d983f3eb7\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-5v888" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.033195 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-hvc5g"] Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.188080 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqpkd\" (UniqueName: \"kubernetes.io/projected/cdf9b744-368d-4c91-8ecf-6a5d983f3eb7-kube-api-access-xqpkd\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-5v888\" (UID: \"cdf9b744-368d-4c91-8ecf-6a5d983f3eb7\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-5v888" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.188147 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frcbg\" (UniqueName: \"kubernetes.io/projected/3b263984-5a9b-45eb-886b-b8209ada6a7a-kube-api-access-frcbg\") pod \"mariadb-operator-controller-manager-56bbcc9d85-hvc5g\" (UID: \"3b263984-5a9b-45eb-886b-b8209ada6a7a\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-hvc5g" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.188173 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cpgxf\" (UniqueName: \"kubernetes.io/projected/ae41b596-75b2-46ab-b95a-ef7b41f1e66b-kube-api-access-cpgxf\") pod \"nova-operator-controller-manager-697bc559fc-spqrr\" (UID: \"ae41b596-75b2-46ab-b95a-ef7b41f1e66b\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-spqrr" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.188829 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qbp2\" (UniqueName: \"kubernetes.io/projected/6edb6d08-9885-457f-8642-ef77c64de97a-kube-api-access-4qbp2\") pod \"keystone-operator-controller-manager-7765d96ddf-zjbwl\" (UID: \"6edb6d08-9885-457f-8642-ef77c64de97a\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-zjbwl" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.189347 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-zjbwl" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.190784 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-tsz4v" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.244174 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6h9hz\" (UniqueName: \"kubernetes.io/projected/48a2075e-30bb-41fb-a311-fb9b593182c6-kube-api-access-6h9hz\") pod \"ironic-operator-controller-manager-6c548fd776-q2pp7\" (UID: \"48a2075e-30bb-41fb-a311-fb9b593182c6\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-q2pp7" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.303162 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frcbg\" (UniqueName: \"kubernetes.io/projected/3b263984-5a9b-45eb-886b-b8209ada6a7a-kube-api-access-frcbg\") pod \"mariadb-operator-controller-manager-56bbcc9d85-hvc5g\" (UID: \"3b263984-5a9b-45eb-886b-b8209ada6a7a\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-hvc5g" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.304165 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqpkd\" (UniqueName: \"kubernetes.io/projected/cdf9b744-368d-4c91-8ecf-6a5d983f3eb7-kube-api-access-xqpkd\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-5v888\" (UID: \"cdf9b744-368d-4c91-8ecf-6a5d983f3eb7\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-5v888" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.316375 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cpgxf\" (UniqueName: \"kubernetes.io/projected/ae41b596-75b2-46ab-b95a-ef7b41f1e66b-kube-api-access-cpgxf\") pod \"nova-operator-controller-manager-697bc559fc-spqrr\" (UID: \"ae41b596-75b2-46ab-b95a-ef7b41f1e66b\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-spqrr" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.369520 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-q2pp7" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.373815 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vh6rh\" (UniqueName: \"kubernetes.io/projected/683c5938-459a-4c60-bb98-8237f6ddc4f6-kube-api-access-vh6rh\") pod \"manila-operator-controller-manager-7c79b5df47-n6chf\" (UID: \"683c5938-459a-4c60-bb98-8237f6ddc4f6\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-n6chf" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.456375 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-5v888" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.457579 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8e7074a0-bae6-49e7-8915-c4cb3242108d-cert\") pod \"infra-operator-controller-manager-57548d458d-gpcmz\" (UID: \"8e7074a0-bae6-49e7-8915-c4cb3242108d\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.458706 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-hvc5g" Dec 05 15:13:44 crc kubenswrapper[4840]: E1205 15:13:44.460625 4840 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 15:13:44 crc kubenswrapper[4840]: E1205 15:13:44.460677 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8e7074a0-bae6-49e7-8915-c4cb3242108d-cert podName:8e7074a0-bae6-49e7-8915-c4cb3242108d nodeName:}" failed. No retries permitted until 2025-12-05 15:13:45.460661751 +0000 UTC m=+903.801724365 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/8e7074a0-bae6-49e7-8915-c4cb3242108d-cert") pod "infra-operator-controller-manager-57548d458d-gpcmz" (UID: "8e7074a0-bae6-49e7-8915-c4cb3242108d") : secret "infra-operator-webhook-server-cert" not found Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.470323 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-5v888"] Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.470359 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-xgflv"] Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.471653 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-xgflv" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.476602 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-spqrr"] Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.506395 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-xgflv"] Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.560994 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-ssr9q" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.561064 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-spqrr" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.561735 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-n6chf" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.590905 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-q5ksl"] Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.596104 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-q5ksl" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.600147 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-xvtwk" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.605831 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-q5ksl"] Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.618290 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt"] Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.619385 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.624232 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-8jvnm"] Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.625241 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8jvnm" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.629321 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.629627 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-bbjtr" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.664294 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-462h4\" (UniqueName: \"kubernetes.io/projected/6c27d60c-a8e1-4616-88b2-391876d4112d-kube-api-access-462h4\") pod \"octavia-operator-controller-manager-998648c74-xgflv\" (UID: \"6c27d60c-a8e1-4616-88b2-391876d4112d\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-xgflv" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.675155 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-8jvnm"] Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.697479 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-q4jqs" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.718693 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-spvpr"] Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.719750 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-spvpr" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.735243 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt"] Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.738271 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-ppm2h" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.753918 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-fvrzr"] Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.754968 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-fvrzr" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.756259 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-g5t4m"] Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.757015 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-g5t4m" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.757346 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-dld8b" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.758891 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-df8c6" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.762587 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-spvpr"] Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.765252 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7w55\" (UniqueName: \"kubernetes.io/projected/a18cf365-d0a8-4750-b11e-12d608ceb0e9-kube-api-access-g7w55\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt\" (UID: \"a18cf365-d0a8-4750-b11e-12d608ceb0e9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.765292 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a18cf365-d0a8-4750-b11e-12d608ceb0e9-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt\" (UID: \"a18cf365-d0a8-4750-b11e-12d608ceb0e9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.765372 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-462h4\" (UniqueName: \"kubernetes.io/projected/6c27d60c-a8e1-4616-88b2-391876d4112d-kube-api-access-462h4\") pod \"octavia-operator-controller-manager-998648c74-xgflv\" (UID: \"6c27d60c-a8e1-4616-88b2-391876d4112d\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-xgflv" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.765439 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-csxk6\" (UniqueName: \"kubernetes.io/projected/387ba01a-b726-4c0c-b3ab-160be43d9587-kube-api-access-csxk6\") pod \"ovn-operator-controller-manager-b6456fdb6-q5ksl\" (UID: \"387ba01a-b726-4c0c-b3ab-160be43d9587\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-q5ksl" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.765479 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkm9f\" (UniqueName: \"kubernetes.io/projected/31eda6ed-1dee-4670-a6d3-22871423db53-kube-api-access-zkm9f\") pod \"placement-operator-controller-manager-78f8948974-8jvnm\" (UID: \"31eda6ed-1dee-4670-a6d3-22871423db53\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-8jvnm" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.766201 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-fvrzr"] Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.784739 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-chg27"] Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.788064 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-chg27" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.792104 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-qm4wl" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.802809 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-chg27"] Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.810489 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-462h4\" (UniqueName: \"kubernetes.io/projected/6c27d60c-a8e1-4616-88b2-391876d4112d-kube-api-access-462h4\") pod \"octavia-operator-controller-manager-998648c74-xgflv\" (UID: \"6c27d60c-a8e1-4616-88b2-391876d4112d\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-xgflv" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.816657 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-g5t4m"] Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.866543 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qf62d\" (UniqueName: \"kubernetes.io/projected/919c5760-f9dd-4b40-9b91-ea3b11d13a26-kube-api-access-qf62d\") pod \"swift-operator-controller-manager-5f8c65bbfc-fvrzr\" (UID: \"919c5760-f9dd-4b40-9b91-ea3b11d13a26\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-fvrzr" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.866835 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csxk6\" (UniqueName: \"kubernetes.io/projected/387ba01a-b726-4c0c-b3ab-160be43d9587-kube-api-access-csxk6\") pod \"ovn-operator-controller-manager-b6456fdb6-q5ksl\" (UID: \"387ba01a-b726-4c0c-b3ab-160be43d9587\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-q5ksl" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.866877 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkm9f\" 
(UniqueName: \"kubernetes.io/projected/31eda6ed-1dee-4670-a6d3-22871423db53-kube-api-access-zkm9f\") pod \"placement-operator-controller-manager-78f8948974-8jvnm\" (UID: \"31eda6ed-1dee-4670-a6d3-22871423db53\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-8jvnm" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.866900 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trqkz\" (UniqueName: \"kubernetes.io/projected/f3d60ad8-fd18-4cf1-9ac2-05ce61d52f08-kube-api-access-trqkz\") pod \"telemetry-operator-controller-manager-76cc84c6bb-spvpr\" (UID: \"f3d60ad8-fd18-4cf1-9ac2-05ce61d52f08\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-spvpr" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.866936 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7w55\" (UniqueName: \"kubernetes.io/projected/a18cf365-d0a8-4750-b11e-12d608ceb0e9-kube-api-access-g7w55\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt\" (UID: \"a18cf365-d0a8-4750-b11e-12d608ceb0e9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.866957 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a18cf365-d0a8-4750-b11e-12d608ceb0e9-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt\" (UID: \"a18cf365-d0a8-4750-b11e-12d608ceb0e9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.866981 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbp5b\" (UniqueName: \"kubernetes.io/projected/df20a713-1c9f-4738-8401-ddff0dcf0c38-kube-api-access-vbp5b\") pod \"test-operator-controller-manager-5854674fcc-g5t4m\" (UID: \"df20a713-1c9f-4738-8401-ddff0dcf0c38\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-g5t4m" Dec 05 15:13:44 crc kubenswrapper[4840]: E1205 15:13:44.872277 4840 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 15:13:44 crc kubenswrapper[4840]: E1205 15:13:44.872327 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a18cf365-d0a8-4750-b11e-12d608ceb0e9-cert podName:a18cf365-d0a8-4750-b11e-12d608ceb0e9 nodeName:}" failed. No retries permitted until 2025-12-05 15:13:45.372311598 +0000 UTC m=+903.713374212 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a18cf365-d0a8-4750-b11e-12d608ceb0e9-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" (UID: "a18cf365-d0a8-4750-b11e-12d608ceb0e9") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.898639 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkm9f\" (UniqueName: \"kubernetes.io/projected/31eda6ed-1dee-4670-a6d3-22871423db53-kube-api-access-zkm9f\") pod \"placement-operator-controller-manager-78f8948974-8jvnm\" (UID: \"31eda6ed-1dee-4670-a6d3-22871423db53\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-8jvnm" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.915108 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7w55\" (UniqueName: \"kubernetes.io/projected/a18cf365-d0a8-4750-b11e-12d608ceb0e9-kube-api-access-g7w55\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt\" (UID: \"a18cf365-d0a8-4750-b11e-12d608ceb0e9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.938476 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8"] Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.948295 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csxk6\" (UniqueName: \"kubernetes.io/projected/387ba01a-b726-4c0c-b3ab-160be43d9587-kube-api-access-csxk6\") pod \"ovn-operator-controller-manager-b6456fdb6-q5ksl\" (UID: \"387ba01a-b726-4c0c-b3ab-160be43d9587\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-q5ksl" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.950276 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.955214 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.955417 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.955641 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-8bx5z" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.968215 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbp5b\" (UniqueName: \"kubernetes.io/projected/df20a713-1c9f-4738-8401-ddff0dcf0c38-kube-api-access-vbp5b\") pod \"test-operator-controller-manager-5854674fcc-g5t4m\" (UID: \"df20a713-1c9f-4738-8401-ddff0dcf0c38\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-g5t4m" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.968267 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbzdh\" (UniqueName: \"kubernetes.io/projected/53e9bbdb-0559-4053-b38c-395876f9d69f-kube-api-access-jbzdh\") pod \"watcher-operator-controller-manager-769dc69bc-chg27\" (UID: \"53e9bbdb-0559-4053-b38c-395876f9d69f\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-chg27" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.968311 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qf62d\" (UniqueName: \"kubernetes.io/projected/919c5760-f9dd-4b40-9b91-ea3b11d13a26-kube-api-access-qf62d\") pod \"swift-operator-controller-manager-5f8c65bbfc-fvrzr\" (UID: \"919c5760-f9dd-4b40-9b91-ea3b11d13a26\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-fvrzr" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.968377 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trqkz\" (UniqueName: \"kubernetes.io/projected/f3d60ad8-fd18-4cf1-9ac2-05ce61d52f08-kube-api-access-trqkz\") pod \"telemetry-operator-controller-manager-76cc84c6bb-spvpr\" (UID: \"f3d60ad8-fd18-4cf1-9ac2-05ce61d52f08\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-spvpr" Dec 05 15:13:44 crc kubenswrapper[4840]: I1205 15:13:44.974572 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8"] Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.005750 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-xgflv" Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.019721 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trqkz\" (UniqueName: \"kubernetes.io/projected/f3d60ad8-fd18-4cf1-9ac2-05ce61d52f08-kube-api-access-trqkz\") pod \"telemetry-operator-controller-manager-76cc84c6bb-spvpr\" (UID: \"f3d60ad8-fd18-4cf1-9ac2-05ce61d52f08\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-spvpr" Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.021250 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qf62d\" (UniqueName: \"kubernetes.io/projected/919c5760-f9dd-4b40-9b91-ea3b11d13a26-kube-api-access-qf62d\") pod \"swift-operator-controller-manager-5f8c65bbfc-fvrzr\" (UID: \"919c5760-f9dd-4b40-9b91-ea3b11d13a26\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-fvrzr" Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.035363 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbp5b\" (UniqueName: \"kubernetes.io/projected/df20a713-1c9f-4738-8401-ddff0dcf0c38-kube-api-access-vbp5b\") pod \"test-operator-controller-manager-5854674fcc-g5t4m\" (UID: \"df20a713-1c9f-4738-8401-ddff0dcf0c38\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-g5t4m" Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.054500 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-r2br9"] Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.057434 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-r2br9" Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.060703 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-lt58p" Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.073944 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hggv\" (UniqueName: \"kubernetes.io/projected/187efc3a-77ce-4898-89d9-5785491d5d29-kube-api-access-6hggv\") pod \"openstack-operator-controller-manager-79cb7dcf7d-pw6j8\" (UID: \"187efc3a-77ce-4898-89d9-5785491d5d29\") " pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.074196 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-metrics-certs\") pod \"openstack-operator-controller-manager-79cb7dcf7d-pw6j8\" (UID: \"187efc3a-77ce-4898-89d9-5785491d5d29\") " pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.074272 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbzdh\" (UniqueName: \"kubernetes.io/projected/53e9bbdb-0559-4053-b38c-395876f9d69f-kube-api-access-jbzdh\") pod \"watcher-operator-controller-manager-769dc69bc-chg27\" (UID: \"53e9bbdb-0559-4053-b38c-395876f9d69f\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-chg27" Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.074325 
4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-webhook-certs\") pod \"openstack-operator-controller-manager-79cb7dcf7d-pw6j8\" (UID: \"187efc3a-77ce-4898-89d9-5785491d5d29\") " pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.092050 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8jvnm" Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.093416 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbzdh\" (UniqueName: \"kubernetes.io/projected/53e9bbdb-0559-4053-b38c-395876f9d69f-kube-api-access-jbzdh\") pod \"watcher-operator-controller-manager-769dc69bc-chg27\" (UID: \"53e9bbdb-0559-4053-b38c-395876f9d69f\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-chg27" Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.101071 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-spvpr" Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.161705 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-q5ksl" Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.172014 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-fvrzr" Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.187838 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-g5t4m" Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.224725 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqzpv\" (UniqueName: \"kubernetes.io/projected/e1dca245-f390-4f32-8683-eea98ad3fb45-kube-api-access-kqzpv\") pod \"rabbitmq-cluster-operator-manager-668c99d594-r2br9\" (UID: \"e1dca245-f390-4f32-8683-eea98ad3fb45\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-r2br9" Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.224933 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-metrics-certs\") pod \"openstack-operator-controller-manager-79cb7dcf7d-pw6j8\" (UID: \"187efc3a-77ce-4898-89d9-5785491d5d29\") " pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.225910 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-r2br9"] Dec 05 15:13:45 crc kubenswrapper[4840]: E1205 15:13:45.231014 4840 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 15:13:45 crc kubenswrapper[4840]: E1205 15:13:45.231094 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-metrics-certs podName:187efc3a-77ce-4898-89d9-5785491d5d29 nodeName:}" failed. 
No retries permitted until 2025-12-05 15:13:45.731067787 +0000 UTC m=+904.072130401 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-metrics-certs") pod "openstack-operator-controller-manager-79cb7dcf7d-pw6j8" (UID: "187efc3a-77ce-4898-89d9-5785491d5d29") : secret "metrics-server-cert" not found Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.231995 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-webhook-certs\") pod \"openstack-operator-controller-manager-79cb7dcf7d-pw6j8\" (UID: \"187efc3a-77ce-4898-89d9-5785491d5d29\") " pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" Dec 05 15:13:45 crc kubenswrapper[4840]: E1205 15:13:45.233311 4840 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.233355 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hggv\" (UniqueName: \"kubernetes.io/projected/187efc3a-77ce-4898-89d9-5785491d5d29-kube-api-access-6hggv\") pod \"openstack-operator-controller-manager-79cb7dcf7d-pw6j8\" (UID: \"187efc3a-77ce-4898-89d9-5785491d5d29\") " pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" Dec 05 15:13:45 crc kubenswrapper[4840]: E1205 15:13:45.233376 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-webhook-certs podName:187efc3a-77ce-4898-89d9-5785491d5d29 nodeName:}" failed. No retries permitted until 2025-12-05 15:13:45.733358842 +0000 UTC m=+904.074421456 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-webhook-certs") pod "openstack-operator-controller-manager-79cb7dcf7d-pw6j8" (UID: "187efc3a-77ce-4898-89d9-5785491d5d29") : secret "webhook-server-cert" not found Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.367447 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqzpv\" (UniqueName: \"kubernetes.io/projected/e1dca245-f390-4f32-8683-eea98ad3fb45-kube-api-access-kqzpv\") pod \"rabbitmq-cluster-operator-manager-668c99d594-r2br9\" (UID: \"e1dca245-f390-4f32-8683-eea98ad3fb45\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-r2br9" Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.413678 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hggv\" (UniqueName: \"kubernetes.io/projected/187efc3a-77ce-4898-89d9-5785491d5d29-kube-api-access-6hggv\") pod \"openstack-operator-controller-manager-79cb7dcf7d-pw6j8\" (UID: \"187efc3a-77ce-4898-89d9-5785491d5d29\") " pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.414182 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqzpv\" (UniqueName: \"kubernetes.io/projected/e1dca245-f390-4f32-8683-eea98ad3fb45-kube-api-access-kqzpv\") pod \"rabbitmq-cluster-operator-manager-668c99d594-r2br9\" (UID: \"e1dca245-f390-4f32-8683-eea98ad3fb45\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-r2br9" Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.423544 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-chg27" Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.481562 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8e7074a0-bae6-49e7-8915-c4cb3242108d-cert\") pod \"infra-operator-controller-manager-57548d458d-gpcmz\" (UID: \"8e7074a0-bae6-49e7-8915-c4cb3242108d\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz" Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.481645 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a18cf365-d0a8-4750-b11e-12d608ceb0e9-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt\" (UID: \"a18cf365-d0a8-4750-b11e-12d608ceb0e9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" Dec 05 15:13:45 crc kubenswrapper[4840]: E1205 15:13:45.482004 4840 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 15:13:45 crc kubenswrapper[4840]: E1205 15:13:45.482045 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a18cf365-d0a8-4750-b11e-12d608ceb0e9-cert podName:a18cf365-d0a8-4750-b11e-12d608ceb0e9 nodeName:}" failed. No retries permitted until 2025-12-05 15:13:46.482033214 +0000 UTC m=+904.823095828 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a18cf365-d0a8-4750-b11e-12d608ceb0e9-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" (UID: "a18cf365-d0a8-4750-b11e-12d608ceb0e9") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 15:13:45 crc kubenswrapper[4840]: E1205 15:13:45.482174 4840 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 15:13:45 crc kubenswrapper[4840]: E1205 15:13:45.482245 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8e7074a0-bae6-49e7-8915-c4cb3242108d-cert podName:8e7074a0-bae6-49e7-8915-c4cb3242108d nodeName:}" failed. No retries permitted until 2025-12-05 15:13:47.482224659 +0000 UTC m=+905.823287333 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/8e7074a0-bae6-49e7-8915-c4cb3242108d-cert") pod "infra-operator-controller-manager-57548d458d-gpcmz" (UID: "8e7074a0-bae6-49e7-8915-c4cb3242108d") : secret "infra-operator-webhook-server-cert" not found Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.491022 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-r2br9" Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.787533 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-metrics-certs\") pod \"openstack-operator-controller-manager-79cb7dcf7d-pw6j8\" (UID: \"187efc3a-77ce-4898-89d9-5785491d5d29\") " pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" Dec 05 15:13:45 crc kubenswrapper[4840]: E1205 15:13:45.787758 4840 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 15:13:45 crc kubenswrapper[4840]: E1205 15:13:45.788146 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-metrics-certs podName:187efc3a-77ce-4898-89d9-5785491d5d29 nodeName:}" failed. No retries permitted until 2025-12-05 15:13:46.788120241 +0000 UTC m=+905.129182865 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-metrics-certs") pod "openstack-operator-controller-manager-79cb7dcf7d-pw6j8" (UID: "187efc3a-77ce-4898-89d9-5785491d5d29") : secret "metrics-server-cert" not found Dec 05 15:13:45 crc kubenswrapper[4840]: I1205 15:13:45.788057 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-webhook-certs\") pod \"openstack-operator-controller-manager-79cb7dcf7d-pw6j8\" (UID: \"187efc3a-77ce-4898-89d9-5785491d5d29\") " pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" Dec 05 15:13:45 crc kubenswrapper[4840]: E1205 15:13:45.788367 4840 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 15:13:45 crc kubenswrapper[4840]: E1205 15:13:45.788625 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-webhook-certs podName:187efc3a-77ce-4898-89d9-5785491d5d29 nodeName:}" failed. No retries permitted until 2025-12-05 15:13:46.788600785 +0000 UTC m=+905.129663409 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-webhook-certs") pod "openstack-operator-controller-manager-79cb7dcf7d-pw6j8" (UID: "187efc3a-77ce-4898-89d9-5785491d5d29") : secret "webhook-server-cert" not found Dec 05 15:13:46 crc kubenswrapper[4840]: I1205 15:13:46.563369 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a18cf365-d0a8-4750-b11e-12d608ceb0e9-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt\" (UID: \"a18cf365-d0a8-4750-b11e-12d608ceb0e9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" Dec 05 15:13:46 crc kubenswrapper[4840]: E1205 15:13:46.563578 4840 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 15:13:46 crc kubenswrapper[4840]: E1205 15:13:46.564308 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a18cf365-d0a8-4750-b11e-12d608ceb0e9-cert podName:a18cf365-d0a8-4750-b11e-12d608ceb0e9 nodeName:}" failed. No retries permitted until 2025-12-05 15:13:48.564280149 +0000 UTC m=+906.905342763 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a18cf365-d0a8-4750-b11e-12d608ceb0e9-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" (UID: "a18cf365-d0a8-4750-b11e-12d608ceb0e9") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 15:13:46 crc kubenswrapper[4840]: I1205 15:13:46.818539 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-7zmt7"] Dec 05 15:13:46 crc kubenswrapper[4840]: I1205 15:13:46.879812 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-metrics-certs\") pod \"openstack-operator-controller-manager-79cb7dcf7d-pw6j8\" (UID: \"187efc3a-77ce-4898-89d9-5785491d5d29\") " pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" Dec 05 15:13:46 crc kubenswrapper[4840]: I1205 15:13:46.879913 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-webhook-certs\") pod \"openstack-operator-controller-manager-79cb7dcf7d-pw6j8\" (UID: \"187efc3a-77ce-4898-89d9-5785491d5d29\") " pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" Dec 05 15:13:46 crc kubenswrapper[4840]: E1205 15:13:46.880085 4840 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 15:13:46 crc kubenswrapper[4840]: E1205 15:13:46.880138 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-webhook-certs podName:187efc3a-77ce-4898-89d9-5785491d5d29 nodeName:}" failed. No retries permitted until 2025-12-05 15:13:48.880116633 +0000 UTC m=+907.221179247 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-webhook-certs") pod "openstack-operator-controller-manager-79cb7dcf7d-pw6j8" (UID: "187efc3a-77ce-4898-89d9-5785491d5d29") : secret "webhook-server-cert" not found Dec 05 15:13:46 crc kubenswrapper[4840]: E1205 15:13:46.880524 4840 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 15:13:46 crc kubenswrapper[4840]: E1205 15:13:46.880611 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-metrics-certs podName:187efc3a-77ce-4898-89d9-5785491d5d29 nodeName:}" failed. No retries permitted until 2025-12-05 15:13:48.880590546 +0000 UTC m=+907.221653250 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-metrics-certs") pod "openstack-operator-controller-manager-79cb7dcf7d-pw6j8" (UID: "187efc3a-77ce-4898-89d9-5785491d5d29") : secret "metrics-server-cert" not found Dec 05 15:13:46 crc kubenswrapper[4840]: I1205 15:13:46.901974 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-fwvd2"] Dec 05 15:13:46 crc kubenswrapper[4840]: I1205 15:13:46.920264 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-dcbzq"] Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.047998 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-8vw7g"] Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.094516 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fmbzp"] Dec 05 15:13:47 crc kubenswrapper[4840]: W1205 15:13:47.098440 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podff2da961_d2c6_486f_87bf_2394ee00a5a1.slice/crio-0beead8ab1f8fe263861c2edca6f762e68501a6fc8ffde91ef4cbc0fd597bb1d WatchSource:0}: Error finding container 0beead8ab1f8fe263861c2edca6f762e68501a6fc8ffde91ef4cbc0fd597bb1d: Status 404 returned error can't find the container with id 0beead8ab1f8fe263861c2edca6f762e68501a6fc8ffde91ef4cbc0fd597bb1d Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.108135 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-qz4dx"] Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.208009 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-5v888"] Dec 05 15:13:47 crc kubenswrapper[4840]: W1205 15:13:47.209310 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6edb6d08_9885_457f_8642_ef77c64de97a.slice/crio-9bfae4b5a41a7625f64f0d2534035b858415749e93d359102a1970c497864307 WatchSource:0}: Error finding container 9bfae4b5a41a7625f64f0d2534035b858415749e93d359102a1970c497864307: Status 404 returned error can't find the container with id 9bfae4b5a41a7625f64f0d2534035b858415749e93d359102a1970c497864307 Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.209830 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-zjbwl"] Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.219430 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-q2pp7"] Dec 05 15:13:47 crc kubenswrapper[4840]: W1205 15:13:47.223369 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod48a2075e_30bb_41fb_a311_fb9b593182c6.slice/crio-fbf566bbd59495dced4b7357e52912c3f3c370e4103262e4aad6b8e2d524e69e WatchSource:0}: Error finding container fbf566bbd59495dced4b7357e52912c3f3c370e4103262e4aad6b8e2d524e69e: Status 404 returned error can't find the container with id fbf566bbd59495dced4b7357e52912c3f3c370e4103262e4aad6b8e2d524e69e Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.223960 
4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-n6chf"] Dec 05 15:13:47 crc kubenswrapper[4840]: W1205 15:13:47.224957 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod683c5938_459a_4c60_bb98_8237f6ddc4f6.slice/crio-13bb0b046dfff303d6eb127cf9cd11e65de979ab6ad5bf7d27c44a41ed6435a8 WatchSource:0}: Error finding container 13bb0b046dfff303d6eb127cf9cd11e65de979ab6ad5bf7d27c44a41ed6435a8: Status 404 returned error can't find the container with id 13bb0b046dfff303d6eb127cf9cd11e65de979ab6ad5bf7d27c44a41ed6435a8 Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.269258 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-dcbzq" event={"ID":"94defa3c-b83c-44b9-83c0-e92bdf7944be","Type":"ContainerStarted","Data":"fc7df8cc65c8bb933fa52ce46b05072b96dca638ec15899486caef2d0986c48b"} Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.270560 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-qz4dx" event={"ID":"546f9401-ad92-49f1-836a-8e240bbc2d61","Type":"ContainerStarted","Data":"7de2e6e538dfd6678459507df56ce8d962bf55450bb1fc2c5ba9c0e300fcf8da"} Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.271369 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-n6chf" event={"ID":"683c5938-459a-4c60-bb98-8237f6ddc4f6","Type":"ContainerStarted","Data":"13bb0b046dfff303d6eb127cf9cd11e65de979ab6ad5bf7d27c44a41ed6435a8"} Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.273164 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7zmt7" event={"ID":"c79f46e0-5947-4b4a-b581-0e49736fb41f","Type":"ContainerStarted","Data":"d6b75f8594b6487931baf510da8f51fc156d185ff84446ad72749e56a50bfe8e"} Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.273880 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-fwvd2" event={"ID":"c15ac393-953d-45e9-b8dc-7212c6e2366b","Type":"ContainerStarted","Data":"6c68ae1c0a9a55df9a1059f4e535648cb206ed799a2dba42d28b65df7b9f47f0"} Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.275166 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fmbzp" event={"ID":"ff2da961-d2c6-486f-87bf-2394ee00a5a1","Type":"ContainerStarted","Data":"0beead8ab1f8fe263861c2edca6f762e68501a6fc8ffde91ef4cbc0fd597bb1d"} Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.276367 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-5v888" event={"ID":"cdf9b744-368d-4c91-8ecf-6a5d983f3eb7","Type":"ContainerStarted","Data":"ec1a4e808a8b5d962d33d512425bd5b1d278f910d8551c8c6cdca6b606436490"} Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.277456 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-q2pp7" event={"ID":"48a2075e-30bb-41fb-a311-fb9b593182c6","Type":"ContainerStarted","Data":"fbf566bbd59495dced4b7357e52912c3f3c370e4103262e4aad6b8e2d524e69e"} Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.279084 4840 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-zjbwl" event={"ID":"6edb6d08-9885-457f-8642-ef77c64de97a","Type":"ContainerStarted","Data":"9bfae4b5a41a7625f64f0d2534035b858415749e93d359102a1970c497864307"} Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.280270 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-8vw7g" event={"ID":"9f897b64-3aeb-44c6-a340-9e0082876e93","Type":"ContainerStarted","Data":"528ae12fe914f50a9b6e45df3ee87e5eecb5d24435a660e546db5979927f1421"} Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.323333 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-8jvnm"] Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.341950 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-q5ksl"] Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.347815 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-hvc5g"] Dec 05 15:13:47 crc kubenswrapper[4840]: E1205 15:13:47.353309 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-csxk6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} 
start failed in pod ovn-operator-controller-manager-b6456fdb6-q5ksl_openstack-operators(387ba01a-b726-4c0c-b3ab-160be43d9587): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 15:13:47 crc kubenswrapper[4840]: W1205 15:13:47.354351 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b263984_5a9b_45eb_886b_b8209ada6a7a.slice/crio-2bf776c7ccd5af80b9f11748bd84f6f5254dc419efd80c2aaa403bcd3d626246 WatchSource:0}: Error finding container 2bf776c7ccd5af80b9f11748bd84f6f5254dc419efd80c2aaa403bcd3d626246: Status 404 returned error can't find the container with id 2bf776c7ccd5af80b9f11748bd84f6f5254dc419efd80c2aaa403bcd3d626246 Dec 05 15:13:47 crc kubenswrapper[4840]: E1205 15:13:47.355117 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-csxk6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-q5ksl_openstack-operators(387ba01a-b726-4c0c-b3ab-160be43d9587): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 15:13:47 crc kubenswrapper[4840]: E1205 15:13:47.356279 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-q5ksl" podUID="387ba01a-b726-4c0c-b3ab-160be43d9587" Dec 05 15:13:47 crc kubenswrapper[4840]: E1205 15:13:47.357843 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-frcbg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-56bbcc9d85-hvc5g_openstack-operators(3b263984-5a9b-45eb-886b-b8209ada6a7a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 15:13:47 crc kubenswrapper[4840]: E1205 15:13:47.360153 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-frcbg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-56bbcc9d85-hvc5g_openstack-operators(3b263984-5a9b-45eb-886b-b8209ada6a7a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 15:13:47 crc kubenswrapper[4840]: E1205 15:13:47.361375 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-hvc5g" podUID="3b263984-5a9b-45eb-886b-b8209ada6a7a" Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.362990 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-spqrr"] Dec 05 15:13:47 crc kubenswrapper[4840]: W1205 15:13:47.427149 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6c27d60c_a8e1_4616_88b2_391876d4112d.slice/crio-62650de58a19bc38c44d688885e1b7f08fcd26716e68364504084ee0b2c21319 WatchSource:0}: Error finding container 62650de58a19bc38c44d688885e1b7f08fcd26716e68364504084ee0b2c21319: Status 404 returned error can't find the container with id 62650de58a19bc38c44d688885e1b7f08fcd26716e68364504084ee0b2c21319 Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.428559 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-xgflv"] Dec 05 15:13:47 crc kubenswrapper[4840]: E1205 15:13:47.432562 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-462h4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-xgflv_openstack-operators(6c27d60c-a8e1-4616-88b2-391876d4112d): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 15:13:47 crc kubenswrapper[4840]: E1205 15:13:47.435575 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-462h4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-xgflv_openstack-operators(6c27d60c-a8e1-4616-88b2-391876d4112d): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 15:13:47 crc kubenswrapper[4840]: E1205 15:13:47.436690 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS 
exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-xgflv" podUID="6c27d60c-a8e1-4616-88b2-391876d4112d" Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.438778 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-fvrzr"] Dec 05 15:13:47 crc kubenswrapper[4840]: W1205 15:13:47.439156 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod53e9bbdb_0559_4053_b38c_395876f9d69f.slice/crio-b87a8dd0d4f63ea0c5041d650461a4973666a267297d93d89f5238ae73487211 WatchSource:0}: Error finding container b87a8dd0d4f63ea0c5041d650461a4973666a267297d93d89f5238ae73487211: Status 404 returned error can't find the container with id b87a8dd0d4f63ea0c5041d650461a4973666a267297d93d89f5238ae73487211 Dec 05 15:13:47 crc kubenswrapper[4840]: E1205 15:13:47.441359 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jbzdh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-chg27_openstack-operators(53e9bbdb-0559-4053-b38c-395876f9d69f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 15:13:47 crc kubenswrapper[4840]: 
W1205 15:13:47.442301 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddf20a713_1c9f_4738_8401_ddff0dcf0c38.slice/crio-b7b5944423f9ab4796fb693cdd6d55f3e66cd16d50fedecede92aea25f60cec5 WatchSource:0}: Error finding container b7b5944423f9ab4796fb693cdd6d55f3e66cd16d50fedecede92aea25f60cec5: Status 404 returned error can't find the container with id b7b5944423f9ab4796fb693cdd6d55f3e66cd16d50fedecede92aea25f60cec5 Dec 05 15:13:47 crc kubenswrapper[4840]: E1205 15:13:47.444908 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vbp5b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-g5t4m_openstack-operators(df20a713-1c9f-4738-8401-ddff0dcf0c38): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.445022 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-g5t4m"] Dec 05 15:13:47 crc kubenswrapper[4840]: E1205 15:13:47.445261 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d,Command:[/manager],Args:[--leader-elect 
--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qf62d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-fvrzr_openstack-operators(919c5760-f9dd-4b40-9b91-ea3b11d13a26): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 15:13:47 crc kubenswrapper[4840]: E1205 15:13:47.445337 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jbzdh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-chg27_openstack-operators(53e9bbdb-0559-4053-b38c-395876f9d69f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 15:13:47 crc kubenswrapper[4840]: E1205 15:13:47.446475 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-chg27" podUID="53e9bbdb-0559-4053-b38c-395876f9d69f" Dec 05 15:13:47 crc kubenswrapper[4840]: E1205 15:13:47.448244 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qf62d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-fvrzr_openstack-operators(919c5760-f9dd-4b40-9b91-ea3b11d13a26): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 15:13:47 crc kubenswrapper[4840]: E1205 15:13:47.448480 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true 
--v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vbp5b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-g5t4m_openstack-operators(df20a713-1c9f-4738-8401-ddff0dcf0c38): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 15:13:47 crc kubenswrapper[4840]: E1205 15:13:47.449832 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-fvrzr" podUID="919c5760-f9dd-4b40-9b91-ea3b11d13a26" Dec 05 15:13:47 crc kubenswrapper[4840]: E1205 15:13:47.449830 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-g5t4m" podUID="df20a713-1c9f-4738-8401-ddff0dcf0c38" Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.450382 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-chg27"] Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.490358 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8e7074a0-bae6-49e7-8915-c4cb3242108d-cert\") pod \"infra-operator-controller-manager-57548d458d-gpcmz\" (UID: \"8e7074a0-bae6-49e7-8915-c4cb3242108d\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz" Dec 05 15:13:47 crc kubenswrapper[4840]: E1205 15:13:47.490556 4840 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 15:13:47 crc kubenswrapper[4840]: E1205 15:13:47.490609 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8e7074a0-bae6-49e7-8915-c4cb3242108d-cert podName:8e7074a0-bae6-49e7-8915-c4cb3242108d nodeName:}" failed. No retries permitted until 2025-12-05 15:13:51.49059173 +0000 UTC m=+909.831654344 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/8e7074a0-bae6-49e7-8915-c4cb3242108d-cert") pod "infra-operator-controller-manager-57548d458d-gpcmz" (UID: "8e7074a0-bae6-49e7-8915-c4cb3242108d") : secret "infra-operator-webhook-server-cert" not found Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.537012 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-spvpr"] Dec 05 15:13:47 crc kubenswrapper[4840]: I1205 15:13:47.559218 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-r2br9"] Dec 05 15:13:47 crc kubenswrapper[4840]: E1205 15:13:47.562997 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kqzpv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-r2br9_openstack-operators(e1dca245-f390-4f32-8683-eea98ad3fb45): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 15:13:47 crc kubenswrapper[4840]: E1205 15:13:47.564445 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-r2br9" podUID="e1dca245-f390-4f32-8683-eea98ad3fb45" Dec 05 15:13:48 crc kubenswrapper[4840]: I1205 15:13:48.296818 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-r2br9" event={"ID":"e1dca245-f390-4f32-8683-eea98ad3fb45","Type":"ContainerStarted","Data":"982ccc2593e20870a1d797703584457df450fd3dc8c39606b5c756ed76cbee54"} Dec 05 
15:13:48 crc kubenswrapper[4840]: I1205 15:13:48.299463 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-spvpr" event={"ID":"f3d60ad8-fd18-4cf1-9ac2-05ce61d52f08","Type":"ContainerStarted","Data":"770dbbee78117dcfbb01dbca46f23d4f711827806420c7583ffab5e641b69631"} Dec 05 15:13:48 crc kubenswrapper[4840]: E1205 15:13:48.299924 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-r2br9" podUID="e1dca245-f390-4f32-8683-eea98ad3fb45" Dec 05 15:13:48 crc kubenswrapper[4840]: I1205 15:13:48.307986 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-g5t4m" event={"ID":"df20a713-1c9f-4738-8401-ddff0dcf0c38","Type":"ContainerStarted","Data":"b7b5944423f9ab4796fb693cdd6d55f3e66cd16d50fedecede92aea25f60cec5"} Dec 05 15:13:48 crc kubenswrapper[4840]: I1205 15:13:48.309822 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-spqrr" event={"ID":"ae41b596-75b2-46ab-b95a-ef7b41f1e66b","Type":"ContainerStarted","Data":"3eb85b65baf4bcc4781e2371e0d4dcd24a7a3da6be980cb344758ff099aeba82"} Dec 05 15:13:48 crc kubenswrapper[4840]: E1205 15:13:48.313660 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-g5t4m" podUID="df20a713-1c9f-4738-8401-ddff0dcf0c38" Dec 05 15:13:48 crc kubenswrapper[4840]: I1205 15:13:48.333830 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-hvc5g" event={"ID":"3b263984-5a9b-45eb-886b-b8209ada6a7a","Type":"ContainerStarted","Data":"2bf776c7ccd5af80b9f11748bd84f6f5254dc419efd80c2aaa403bcd3d626246"} Dec 05 15:13:48 crc kubenswrapper[4840]: E1205 15:13:48.336716 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-hvc5g" podUID="3b263984-5a9b-45eb-886b-b8209ada6a7a" Dec 05 15:13:48 crc kubenswrapper[4840]: I1205 15:13:48.337070 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8jvnm" event={"ID":"31eda6ed-1dee-4670-a6d3-22871423db53","Type":"ContainerStarted","Data":"6ca76d2153beee49fd7264b3865110c25f8621463d38a51cd1c06fe1ac33eb98"} Dec 05 15:13:48 crc kubenswrapper[4840]: I1205 
15:13:48.351574 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-fvrzr" event={"ID":"919c5760-f9dd-4b40-9b91-ea3b11d13a26","Type":"ContainerStarted","Data":"e06369154458ee1236efeafaa97df5bb2ab8e2d45d9f48aae0cd83b0dbff85ad"} Dec 05 15:13:48 crc kubenswrapper[4840]: E1205 15:13:48.368081 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-fvrzr" podUID="919c5760-f9dd-4b40-9b91-ea3b11d13a26" Dec 05 15:13:48 crc kubenswrapper[4840]: I1205 15:13:48.386986 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-chg27" event={"ID":"53e9bbdb-0559-4053-b38c-395876f9d69f","Type":"ContainerStarted","Data":"b87a8dd0d4f63ea0c5041d650461a4973666a267297d93d89f5238ae73487211"} Dec 05 15:13:48 crc kubenswrapper[4840]: E1205 15:13:48.397959 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-chg27" podUID="53e9bbdb-0559-4053-b38c-395876f9d69f" Dec 05 15:13:48 crc kubenswrapper[4840]: I1205 15:13:48.402065 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-q5ksl" event={"ID":"387ba01a-b726-4c0c-b3ab-160be43d9587","Type":"ContainerStarted","Data":"e5fb8b711de46b7e2dffa98f78487b13dcd4193460457fa8926ba7e24e90c681"} Dec 05 15:13:48 crc kubenswrapper[4840]: E1205 15:13:48.416031 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-q5ksl" podUID="387ba01a-b726-4c0c-b3ab-160be43d9587" Dec 05 15:13:48 crc kubenswrapper[4840]: I1205 15:13:48.421011 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-xgflv" event={"ID":"6c27d60c-a8e1-4616-88b2-391876d4112d","Type":"ContainerStarted","Data":"62650de58a19bc38c44d688885e1b7f08fcd26716e68364504084ee0b2c21319"} Dec 05 15:13:48 crc kubenswrapper[4840]: E1205 15:13:48.426056 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-xgflv" podUID="6c27d60c-a8e1-4616-88b2-391876d4112d" Dec 05 15:13:48 crc kubenswrapper[4840]: I1205 15:13:48.617779 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a18cf365-d0a8-4750-b11e-12d608ceb0e9-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt\" (UID: \"a18cf365-d0a8-4750-b11e-12d608ceb0e9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" Dec 05 15:13:48 crc kubenswrapper[4840]: E1205 15:13:48.617975 4840 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 15:13:48 crc kubenswrapper[4840]: E1205 15:13:48.618053 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a18cf365-d0a8-4750-b11e-12d608ceb0e9-cert podName:a18cf365-d0a8-4750-b11e-12d608ceb0e9 nodeName:}" failed. No retries permitted until 2025-12-05 15:13:52.618034777 +0000 UTC m=+910.959097391 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a18cf365-d0a8-4750-b11e-12d608ceb0e9-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" (UID: "a18cf365-d0a8-4750-b11e-12d608ceb0e9") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 15:13:48 crc kubenswrapper[4840]: I1205 15:13:48.927395 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-webhook-certs\") pod \"openstack-operator-controller-manager-79cb7dcf7d-pw6j8\" (UID: \"187efc3a-77ce-4898-89d9-5785491d5d29\") " pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" Dec 05 15:13:48 crc kubenswrapper[4840]: I1205 15:13:48.927513 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-metrics-certs\") pod \"openstack-operator-controller-manager-79cb7dcf7d-pw6j8\" (UID: \"187efc3a-77ce-4898-89d9-5785491d5d29\") " pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" Dec 05 15:13:48 crc kubenswrapper[4840]: E1205 15:13:48.927590 4840 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 15:13:48 crc kubenswrapper[4840]: E1205 15:13:48.927664 4840 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 15:13:48 crc kubenswrapper[4840]: E1205 15:13:48.927680 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-webhook-certs podName:187efc3a-77ce-4898-89d9-5785491d5d29 nodeName:}" failed. No retries permitted until 2025-12-05 15:13:52.927660575 +0000 UTC m=+911.268723189 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-webhook-certs") pod "openstack-operator-controller-manager-79cb7dcf7d-pw6j8" (UID: "187efc3a-77ce-4898-89d9-5785491d5d29") : secret "webhook-server-cert" not found Dec 05 15:13:48 crc kubenswrapper[4840]: E1205 15:13:48.927761 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-metrics-certs podName:187efc3a-77ce-4898-89d9-5785491d5d29 nodeName:}" failed. No retries permitted until 2025-12-05 15:13:52.927741307 +0000 UTC m=+911.268803921 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-metrics-certs") pod "openstack-operator-controller-manager-79cb7dcf7d-pw6j8" (UID: "187efc3a-77ce-4898-89d9-5785491d5d29") : secret "metrics-server-cert" not found Dec 05 15:13:49 crc kubenswrapper[4840]: E1205 15:13:49.444961 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-r2br9" podUID="e1dca245-f390-4f32-8683-eea98ad3fb45" Dec 05 15:13:49 crc kubenswrapper[4840]: E1205 15:13:49.446418 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-chg27" podUID="53e9bbdb-0559-4053-b38c-395876f9d69f" Dec 05 15:13:49 crc kubenswrapper[4840]: E1205 15:13:49.446578 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-q5ksl" podUID="387ba01a-b726-4c0c-b3ab-160be43d9587" Dec 05 15:13:49 crc kubenswrapper[4840]: E1205 15:13:49.446666 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-xgflv" podUID="6c27d60c-a8e1-4616-88b2-391876d4112d" Dec 05 15:13:49 crc kubenswrapper[4840]: E1205 15:13:49.446740 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with 
ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-g5t4m" podUID="df20a713-1c9f-4738-8401-ddff0dcf0c38" Dec 05 15:13:49 crc kubenswrapper[4840]: E1205 15:13:49.446819 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-fvrzr" podUID="919c5760-f9dd-4b40-9b91-ea3b11d13a26" Dec 05 15:13:49 crc kubenswrapper[4840]: E1205 15:13:49.446912 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-hvc5g" podUID="3b263984-5a9b-45eb-886b-b8209ada6a7a" Dec 05 15:13:51 crc kubenswrapper[4840]: I1205 15:13:51.625971 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8e7074a0-bae6-49e7-8915-c4cb3242108d-cert\") pod \"infra-operator-controller-manager-57548d458d-gpcmz\" (UID: \"8e7074a0-bae6-49e7-8915-c4cb3242108d\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz" Dec 05 15:13:51 crc kubenswrapper[4840]: E1205 15:13:51.626182 4840 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 15:13:51 crc kubenswrapper[4840]: E1205 15:13:51.627444 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8e7074a0-bae6-49e7-8915-c4cb3242108d-cert podName:8e7074a0-bae6-49e7-8915-c4cb3242108d nodeName:}" failed. No retries permitted until 2025-12-05 15:13:59.627427886 +0000 UTC m=+917.968490500 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/8e7074a0-bae6-49e7-8915-c4cb3242108d-cert") pod "infra-operator-controller-manager-57548d458d-gpcmz" (UID: "8e7074a0-bae6-49e7-8915-c4cb3242108d") : secret "infra-operator-webhook-server-cert" not found Dec 05 15:13:52 crc kubenswrapper[4840]: I1205 15:13:52.623053 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a18cf365-d0a8-4750-b11e-12d608ceb0e9-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt\" (UID: \"a18cf365-d0a8-4750-b11e-12d608ceb0e9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" Dec 05 15:13:52 crc kubenswrapper[4840]: E1205 15:13:52.623470 4840 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 15:13:52 crc kubenswrapper[4840]: E1205 15:13:52.623518 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a18cf365-d0a8-4750-b11e-12d608ceb0e9-cert podName:a18cf365-d0a8-4750-b11e-12d608ceb0e9 nodeName:}" failed. No retries permitted until 2025-12-05 15:14:00.623503552 +0000 UTC m=+918.964566166 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a18cf365-d0a8-4750-b11e-12d608ceb0e9-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" (UID: "a18cf365-d0a8-4750-b11e-12d608ceb0e9") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 15:13:52 crc kubenswrapper[4840]: I1205 15:13:52.927847 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-metrics-certs\") pod \"openstack-operator-controller-manager-79cb7dcf7d-pw6j8\" (UID: \"187efc3a-77ce-4898-89d9-5785491d5d29\") " pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" Dec 05 15:13:52 crc kubenswrapper[4840]: I1205 15:13:52.928093 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-webhook-certs\") pod \"openstack-operator-controller-manager-79cb7dcf7d-pw6j8\" (UID: \"187efc3a-77ce-4898-89d9-5785491d5d29\") " pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" Dec 05 15:13:52 crc kubenswrapper[4840]: E1205 15:13:52.928128 4840 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 15:13:52 crc kubenswrapper[4840]: E1205 15:13:52.928209 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-metrics-certs podName:187efc3a-77ce-4898-89d9-5785491d5d29 nodeName:}" failed. No retries permitted until 2025-12-05 15:14:00.92818805 +0000 UTC m=+919.269250664 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-metrics-certs") pod "openstack-operator-controller-manager-79cb7dcf7d-pw6j8" (UID: "187efc3a-77ce-4898-89d9-5785491d5d29") : secret "metrics-server-cert" not found Dec 05 15:13:52 crc kubenswrapper[4840]: E1205 15:13:52.928309 4840 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 15:13:52 crc kubenswrapper[4840]: E1205 15:13:52.928377 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-webhook-certs podName:187efc3a-77ce-4898-89d9-5785491d5d29 nodeName:}" failed. No retries permitted until 2025-12-05 15:14:00.928356625 +0000 UTC m=+919.269419339 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-webhook-certs") pod "openstack-operator-controller-manager-79cb7dcf7d-pw6j8" (UID: "187efc3a-77ce-4898-89d9-5785491d5d29") : secret "webhook-server-cert" not found Dec 05 15:13:59 crc kubenswrapper[4840]: I1205 15:13:59.656047 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8e7074a0-bae6-49e7-8915-c4cb3242108d-cert\") pod \"infra-operator-controller-manager-57548d458d-gpcmz\" (UID: \"8e7074a0-bae6-49e7-8915-c4cb3242108d\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz" Dec 05 15:13:59 crc kubenswrapper[4840]: I1205 15:13:59.678590 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8e7074a0-bae6-49e7-8915-c4cb3242108d-cert\") pod \"infra-operator-controller-manager-57548d458d-gpcmz\" (UID: \"8e7074a0-bae6-49e7-8915-c4cb3242108d\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz" Dec 05 15:13:59 crc kubenswrapper[4840]: I1205 15:13:59.802032 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz" Dec 05 15:14:00 crc kubenswrapper[4840]: I1205 15:14:00.663210 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a18cf365-d0a8-4750-b11e-12d608ceb0e9-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt\" (UID: \"a18cf365-d0a8-4750-b11e-12d608ceb0e9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" Dec 05 15:14:00 crc kubenswrapper[4840]: I1205 15:14:00.789303 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a18cf365-d0a8-4750-b11e-12d608ceb0e9-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt\" (UID: \"a18cf365-d0a8-4750-b11e-12d608ceb0e9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" Dec 05 15:14:00 crc kubenswrapper[4840]: I1205 15:14:00.961352 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" Dec 05 15:14:00 crc kubenswrapper[4840]: I1205 15:14:00.965853 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-webhook-certs\") pod \"openstack-operator-controller-manager-79cb7dcf7d-pw6j8\" (UID: \"187efc3a-77ce-4898-89d9-5785491d5d29\") " pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" Dec 05 15:14:00 crc kubenswrapper[4840]: I1205 15:14:00.965998 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-metrics-certs\") pod \"openstack-operator-controller-manager-79cb7dcf7d-pw6j8\" (UID: \"187efc3a-77ce-4898-89d9-5785491d5d29\") " pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" Dec 05 15:14:00 crc kubenswrapper[4840]: I1205 15:14:00.974230 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-webhook-certs\") pod \"openstack-operator-controller-manager-79cb7dcf7d-pw6j8\" (UID: \"187efc3a-77ce-4898-89d9-5785491d5d29\") " pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" Dec 05 15:14:00 crc kubenswrapper[4840]: I1205 15:14:00.991981 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/187efc3a-77ce-4898-89d9-5785491d5d29-metrics-certs\") pod \"openstack-operator-controller-manager-79cb7dcf7d-pw6j8\" (UID: \"187efc3a-77ce-4898-89d9-5785491d5d29\") " pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" Dec 05 15:14:01 crc kubenswrapper[4840]: I1205 15:14:01.050290 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" Dec 05 15:14:01 crc kubenswrapper[4840]: E1205 15:14:01.513958 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:0f523b7e2fa9e86fef986acf07d0c42d5658c475d565f11eaea926ebffcb6530" Dec 05 15:14:01 crc kubenswrapper[4840]: E1205 15:14:01.514178 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:0f523b7e2fa9e86fef986acf07d0c42d5658c475d565f11eaea926ebffcb6530,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6h9hz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-6c548fd776-q2pp7_openstack-operators(48a2075e-30bb-41fb-a311-fb9b593182c6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:14:02 crc kubenswrapper[4840]: E1205 15:14:02.338915 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385" Dec 05 15:14:02 crc kubenswrapper[4840]: E1205 15:14:02.339305 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-trqkz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-spvpr_openstack-operators(f3d60ad8-fd18-4cf1-9ac2-05ce61d52f08): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:14:03 crc kubenswrapper[4840]: E1205 15:14:03.274108 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f" Dec 05 15:14:03 crc kubenswrapper[4840]: E1205 15:14:03.274331 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zkm9f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-8jvnm_openstack-operators(31eda6ed-1dee-4670-a6d3-22871423db53): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:14:04 crc kubenswrapper[4840]: E1205 15:14:04.482504 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:abdb733b01e92ac17f565762f30f1d075b44c16421bd06e557f6bb3c319e1809" Dec 05 15:14:04 crc kubenswrapper[4840]: E1205 15:14:04.483056 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:abdb733b01e92ac17f565762f30f1d075b44c16421bd06e557f6bb3c319e1809,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wfcp6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-77987cd8cd-fwvd2_openstack-operators(c15ac393-953d-45e9-b8dc-7212c6e2366b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:14:05 crc kubenswrapper[4840]: E1205 15:14:05.741635 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea" Dec 05 15:14:05 crc kubenswrapper[4840]: E1205 15:14:05.742140 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lqsfl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-7d9dfd778-7zmt7_openstack-operators(c79f46e0-5947-4b4a-b581-0e49736fb41f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:14:06 crc kubenswrapper[4840]: E1205 15:14:06.515546 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670" Dec 05 15:14:06 crc kubenswrapper[4840]: E1205 15:14:06.515770 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cpgxf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-spqrr_openstack-operators(ae41b596-75b2-46ab-b95a-ef7b41f1e66b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:14:07 crc kubenswrapper[4840]: E1205 15:14:07.128307 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7" Dec 05 15:14:07 crc kubenswrapper[4840]: E1205 15:14:07.128463 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4qbp2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-zjbwl_openstack-operators(6edb6d08-9885-457f-8642-ef77c64de97a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:14:11 crc kubenswrapper[4840]: I1205 15:14:11.786577 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-c8jdn"] Dec 05 15:14:11 crc kubenswrapper[4840]: I1205 15:14:11.790756 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c8jdn" Dec 05 15:14:11 crc kubenswrapper[4840]: I1205 15:14:11.804454 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c8jdn"] Dec 05 15:14:11 crc kubenswrapper[4840]: I1205 15:14:11.814941 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6c8l9\" (UniqueName: \"kubernetes.io/projected/df1dca25-320c-4c19-a6c7-b113a3ec4de4-kube-api-access-6c8l9\") pod \"community-operators-c8jdn\" (UID: \"df1dca25-320c-4c19-a6c7-b113a3ec4de4\") " pod="openshift-marketplace/community-operators-c8jdn" Dec 05 15:14:11 crc kubenswrapper[4840]: I1205 15:14:11.815030 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df1dca25-320c-4c19-a6c7-b113a3ec4de4-catalog-content\") pod \"community-operators-c8jdn\" (UID: \"df1dca25-320c-4c19-a6c7-b113a3ec4de4\") " pod="openshift-marketplace/community-operators-c8jdn" Dec 05 15:14:11 crc kubenswrapper[4840]: I1205 15:14:11.815070 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df1dca25-320c-4c19-a6c7-b113a3ec4de4-utilities\") pod \"community-operators-c8jdn\" (UID: \"df1dca25-320c-4c19-a6c7-b113a3ec4de4\") " pod="openshift-marketplace/community-operators-c8jdn" Dec 05 15:14:11 crc kubenswrapper[4840]: I1205 15:14:11.916171 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df1dca25-320c-4c19-a6c7-b113a3ec4de4-utilities\") pod \"community-operators-c8jdn\" (UID: \"df1dca25-320c-4c19-a6c7-b113a3ec4de4\") " pod="openshift-marketplace/community-operators-c8jdn" Dec 05 15:14:11 crc kubenswrapper[4840]: I1205 15:14:11.916239 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6c8l9\" (UniqueName: \"kubernetes.io/projected/df1dca25-320c-4c19-a6c7-b113a3ec4de4-kube-api-access-6c8l9\") pod \"community-operators-c8jdn\" (UID: \"df1dca25-320c-4c19-a6c7-b113a3ec4de4\") " 
pod="openshift-marketplace/community-operators-c8jdn" Dec 05 15:14:11 crc kubenswrapper[4840]: I1205 15:14:11.916321 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df1dca25-320c-4c19-a6c7-b113a3ec4de4-catalog-content\") pod \"community-operators-c8jdn\" (UID: \"df1dca25-320c-4c19-a6c7-b113a3ec4de4\") " pod="openshift-marketplace/community-operators-c8jdn" Dec 05 15:14:11 crc kubenswrapper[4840]: I1205 15:14:11.916958 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df1dca25-320c-4c19-a6c7-b113a3ec4de4-catalog-content\") pod \"community-operators-c8jdn\" (UID: \"df1dca25-320c-4c19-a6c7-b113a3ec4de4\") " pod="openshift-marketplace/community-operators-c8jdn" Dec 05 15:14:11 crc kubenswrapper[4840]: I1205 15:14:11.917094 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df1dca25-320c-4c19-a6c7-b113a3ec4de4-utilities\") pod \"community-operators-c8jdn\" (UID: \"df1dca25-320c-4c19-a6c7-b113a3ec4de4\") " pod="openshift-marketplace/community-operators-c8jdn" Dec 05 15:14:11 crc kubenswrapper[4840]: I1205 15:14:11.944107 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6c8l9\" (UniqueName: \"kubernetes.io/projected/df1dca25-320c-4c19-a6c7-b113a3ec4de4-kube-api-access-6c8l9\") pod \"community-operators-c8jdn\" (UID: \"df1dca25-320c-4c19-a6c7-b113a3ec4de4\") " pod="openshift-marketplace/community-operators-c8jdn" Dec 05 15:14:12 crc kubenswrapper[4840]: I1205 15:14:12.116814 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c8jdn" Dec 05 15:14:15 crc kubenswrapper[4840]: I1205 15:14:15.502120 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt"] Dec 05 15:14:15 crc kubenswrapper[4840]: I1205 15:14:15.638019 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-scqgw"] Dec 05 15:14:15 crc kubenswrapper[4840]: I1205 15:14:15.639435 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-scqgw" Dec 05 15:14:15 crc kubenswrapper[4840]: I1205 15:14:15.656454 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-scqgw"] Dec 05 15:14:15 crc kubenswrapper[4840]: I1205 15:14:15.745452 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6da7f018-2512-4ff2-8824-f90c5a0d5abf-utilities\") pod \"redhat-marketplace-scqgw\" (UID: \"6da7f018-2512-4ff2-8824-f90c5a0d5abf\") " pod="openshift-marketplace/redhat-marketplace-scqgw" Dec 05 15:14:15 crc kubenswrapper[4840]: I1205 15:14:15.745571 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xstsh\" (UniqueName: \"kubernetes.io/projected/6da7f018-2512-4ff2-8824-f90c5a0d5abf-kube-api-access-xstsh\") pod \"redhat-marketplace-scqgw\" (UID: \"6da7f018-2512-4ff2-8824-f90c5a0d5abf\") " pod="openshift-marketplace/redhat-marketplace-scqgw" Dec 05 15:14:15 crc kubenswrapper[4840]: I1205 15:14:15.747201 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6da7f018-2512-4ff2-8824-f90c5a0d5abf-catalog-content\") pod \"redhat-marketplace-scqgw\" (UID: \"6da7f018-2512-4ff2-8824-f90c5a0d5abf\") " pod="openshift-marketplace/redhat-marketplace-scqgw" Dec 05 15:14:15 crc kubenswrapper[4840]: I1205 15:14:15.848315 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6da7f018-2512-4ff2-8824-f90c5a0d5abf-catalog-content\") pod \"redhat-marketplace-scqgw\" (UID: \"6da7f018-2512-4ff2-8824-f90c5a0d5abf\") " pod="openshift-marketplace/redhat-marketplace-scqgw" Dec 05 15:14:15 crc kubenswrapper[4840]: I1205 15:14:15.848372 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6da7f018-2512-4ff2-8824-f90c5a0d5abf-utilities\") pod \"redhat-marketplace-scqgw\" (UID: \"6da7f018-2512-4ff2-8824-f90c5a0d5abf\") " pod="openshift-marketplace/redhat-marketplace-scqgw" Dec 05 15:14:15 crc kubenswrapper[4840]: I1205 15:14:15.848407 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xstsh\" (UniqueName: \"kubernetes.io/projected/6da7f018-2512-4ff2-8824-f90c5a0d5abf-kube-api-access-xstsh\") pod \"redhat-marketplace-scqgw\" (UID: \"6da7f018-2512-4ff2-8824-f90c5a0d5abf\") " pod="openshift-marketplace/redhat-marketplace-scqgw" Dec 05 15:14:15 crc kubenswrapper[4840]: I1205 15:14:15.849894 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6da7f018-2512-4ff2-8824-f90c5a0d5abf-catalog-content\") pod \"redhat-marketplace-scqgw\" (UID: \"6da7f018-2512-4ff2-8824-f90c5a0d5abf\") " pod="openshift-marketplace/redhat-marketplace-scqgw" Dec 05 15:14:15 crc kubenswrapper[4840]: I1205 15:14:15.850107 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6da7f018-2512-4ff2-8824-f90c5a0d5abf-utilities\") pod \"redhat-marketplace-scqgw\" (UID: \"6da7f018-2512-4ff2-8824-f90c5a0d5abf\") " pod="openshift-marketplace/redhat-marketplace-scqgw" Dec 05 15:14:15 crc kubenswrapper[4840]: I1205 15:14:15.865744 4840 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-xstsh\" (UniqueName: \"kubernetes.io/projected/6da7f018-2512-4ff2-8824-f90c5a0d5abf-kube-api-access-xstsh\") pod \"redhat-marketplace-scqgw\" (UID: \"6da7f018-2512-4ff2-8824-f90c5a0d5abf\") " pod="openshift-marketplace/redhat-marketplace-scqgw" Dec 05 15:14:15 crc kubenswrapper[4840]: I1205 15:14:15.976133 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-scqgw" Dec 05 15:14:17 crc kubenswrapper[4840]: W1205 15:14:17.954979 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda18cf365_d0a8_4750_b11e_12d608ceb0e9.slice/crio-f9b91c6e37f16eb32bdd5ac28ecb2d9c011e1948cd4137bc726e78b20665cda4 WatchSource:0}: Error finding container f9b91c6e37f16eb32bdd5ac28ecb2d9c011e1948cd4137bc726e78b20665cda4: Status 404 returned error can't find the container with id f9b91c6e37f16eb32bdd5ac28ecb2d9c011e1948cd4137bc726e78b20665cda4 Dec 05 15:14:18 crc kubenswrapper[4840]: I1205 15:14:18.010090 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" event={"ID":"a18cf365-d0a8-4750-b11e-12d608ceb0e9","Type":"ContainerStarted","Data":"f9b91c6e37f16eb32bdd5ac28ecb2d9c011e1948cd4137bc726e78b20665cda4"} Dec 05 15:14:18 crc kubenswrapper[4840]: I1205 15:14:18.207889 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz"] Dec 05 15:14:18 crc kubenswrapper[4840]: I1205 15:14:18.364986 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8"] Dec 05 15:14:18 crc kubenswrapper[4840]: W1205 15:14:18.478427 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8e7074a0_bae6_49e7_8915_c4cb3242108d.slice/crio-7eebc68361752fcddb97dbc6235e7c767ba4af9d39a71e289b0cbc09f37dbfbc WatchSource:0}: Error finding container 7eebc68361752fcddb97dbc6235e7c767ba4af9d39a71e289b0cbc09f37dbfbc: Status 404 returned error can't find the container with id 7eebc68361752fcddb97dbc6235e7c767ba4af9d39a71e289b0cbc09f37dbfbc Dec 05 15:14:18 crc kubenswrapper[4840]: W1205 15:14:18.481323 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod187efc3a_77ce_4898_89d9_5785491d5d29.slice/crio-c4951ec591ee2a290c509831df7a6bef08c676757ae6d86d5b5d9cc7d18b70c1 WatchSource:0}: Error finding container c4951ec591ee2a290c509831df7a6bef08c676757ae6d86d5b5d9cc7d18b70c1: Status 404 returned error can't find the container with id c4951ec591ee2a290c509831df7a6bef08c676757ae6d86d5b5d9cc7d18b70c1 Dec 05 15:14:18 crc kubenswrapper[4840]: I1205 15:14:18.739104 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c8jdn"] Dec 05 15:14:19 crc kubenswrapper[4840]: I1205 15:14:19.027788 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-n6chf" event={"ID":"683c5938-459a-4c60-bb98-8237f6ddc4f6","Type":"ContainerStarted","Data":"4e20bc6ffe13d8f9096034d4678f12cc4c7758e2d221bb355c925a064764d815"} Dec 05 15:14:19 crc kubenswrapper[4840]: I1205 15:14:19.035196 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8jdn" 
event={"ID":"df1dca25-320c-4c19-a6c7-b113a3ec4de4","Type":"ContainerStarted","Data":"6baf5c0ff9f1008f1bce6b0c111f7fe695f3ef874820a4c4e3dc4ac0ccdfc5da"} Dec 05 15:14:19 crc kubenswrapper[4840]: I1205 15:14:19.036237 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz" event={"ID":"8e7074a0-bae6-49e7-8915-c4cb3242108d","Type":"ContainerStarted","Data":"7eebc68361752fcddb97dbc6235e7c767ba4af9d39a71e289b0cbc09f37dbfbc"} Dec 05 15:14:19 crc kubenswrapper[4840]: I1205 15:14:19.038507 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fmbzp" event={"ID":"ff2da961-d2c6-486f-87bf-2394ee00a5a1","Type":"ContainerStarted","Data":"6c8644a89a9af5b399199064b89eb9517b031b21d095763ded568f9ab88317e5"} Dec 05 15:14:19 crc kubenswrapper[4840]: I1205 15:14:19.040601 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-dcbzq" event={"ID":"94defa3c-b83c-44b9-83c0-e92bdf7944be","Type":"ContainerStarted","Data":"b87ae1cd018e48e34e94c599b3efde5452d657799c7f9acdb32df32b850a94cc"} Dec 05 15:14:19 crc kubenswrapper[4840]: I1205 15:14:19.042152 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-qz4dx" event={"ID":"546f9401-ad92-49f1-836a-8e240bbc2d61","Type":"ContainerStarted","Data":"9c8e73f4efa5843fe87d18b3c54c20b31b5bf2ab37ac50303ed6dc1e619ef4dc"} Dec 05 15:14:19 crc kubenswrapper[4840]: I1205 15:14:19.044564 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" event={"ID":"187efc3a-77ce-4898-89d9-5785491d5d29","Type":"ContainerStarted","Data":"c4951ec591ee2a290c509831df7a6bef08c676757ae6d86d5b5d9cc7d18b70c1"} Dec 05 15:14:19 crc kubenswrapper[4840]: I1205 15:14:19.046994 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-5v888" event={"ID":"cdf9b744-368d-4c91-8ecf-6a5d983f3eb7","Type":"ContainerStarted","Data":"75bb9b3d4ce368925e8a83d958616fafd85982e205c4c3559013ef9786637fcd"} Dec 05 15:14:19 crc kubenswrapper[4840]: I1205 15:14:19.048014 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-8vw7g" event={"ID":"9f897b64-3aeb-44c6-a340-9e0082876e93","Type":"ContainerStarted","Data":"811c7e9ce9bd591c00084eeeb1999691b1642368cf95d3b717fe0b64ccebf88e"} Dec 05 15:14:19 crc kubenswrapper[4840]: I1205 15:14:19.409954 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-scqgw"] Dec 05 15:14:19 crc kubenswrapper[4840]: W1205 15:14:19.468768 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6da7f018_2512_4ff2_8824_f90c5a0d5abf.slice/crio-4fc79f1f356c34c7bb1197531e1671d8f77fadce82ae5a74733400d0801b557b WatchSource:0}: Error finding container 4fc79f1f356c34c7bb1197531e1671d8f77fadce82ae5a74733400d0801b557b: Status 404 returned error can't find the container with id 4fc79f1f356c34c7bb1197531e1671d8f77fadce82ae5a74733400d0801b557b Dec 05 15:14:19 crc kubenswrapper[4840]: I1205 15:14:19.489486 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:14:19 crc kubenswrapper[4840]: I1205 15:14:19.489533 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:14:20 crc kubenswrapper[4840]: I1205 15:14:20.055074 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scqgw" event={"ID":"6da7f018-2512-4ff2-8824-f90c5a0d5abf","Type":"ContainerStarted","Data":"4fc79f1f356c34c7bb1197531e1671d8f77fadce82ae5a74733400d0801b557b"} Dec 05 15:14:21 crc kubenswrapper[4840]: I1205 15:14:21.115417 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-g5t4m" event={"ID":"df20a713-1c9f-4738-8401-ddff0dcf0c38","Type":"ContainerStarted","Data":"d6cefd6451e65b48808160e22bdd21cbdd5a7a325d31f36292b35f4a8e08b05e"} Dec 05 15:14:21 crc kubenswrapper[4840]: I1205 15:14:21.118007 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-hvc5g" event={"ID":"3b263984-5a9b-45eb-886b-b8209ada6a7a","Type":"ContainerStarted","Data":"cb1453cfb964dc5b71da5d9c4aba7fe115ecec8c758028bdc6c6a6294d3a77ed"} Dec 05 15:14:21 crc kubenswrapper[4840]: I1205 15:14:21.122612 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" event={"ID":"187efc3a-77ce-4898-89d9-5785491d5d29","Type":"ContainerStarted","Data":"eb7d65b38fa5500b263389f13700680587dc03313ed87644db6da7c520f186d7"} Dec 05 15:14:21 crc kubenswrapper[4840]: I1205 15:14:21.123690 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" Dec 05 15:14:21 crc kubenswrapper[4840]: I1205 15:14:21.129112 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-q5ksl" event={"ID":"387ba01a-b726-4c0c-b3ab-160be43d9587","Type":"ContainerStarted","Data":"1b7497120d93d7702f3e6a2c5d38b2dceff080756e231757e588ded2ed735c0e"} Dec 05 15:14:22 crc kubenswrapper[4840]: I1205 15:14:22.219213 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" podStartSLOduration=38.219198735 podStartE2EDuration="38.219198735s" podCreationTimestamp="2025-12-05 15:13:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:14:21.16378727 +0000 UTC m=+939.504849904" watchObservedRunningTime="2025-12-05 15:14:22.219198735 +0000 UTC m=+940.560261349" Dec 05 15:14:31 crc kubenswrapper[4840]: I1205 15:14:31.871001 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-79cb7dcf7d-pw6j8" Dec 05 15:14:34 crc kubenswrapper[4840]: I1205 15:14:34.193394 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5x2cw"] Dec 05 15:14:34 crc kubenswrapper[4840]: I1205 15:14:34.196484 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5x2cw" Dec 05 15:14:34 crc kubenswrapper[4840]: I1205 15:14:34.210787 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5x2cw"] Dec 05 15:14:34 crc kubenswrapper[4840]: I1205 15:14:34.345539 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/980c2743-6e13-40d7-8752-8f3fefc3fefb-utilities\") pod \"certified-operators-5x2cw\" (UID: \"980c2743-6e13-40d7-8752-8f3fefc3fefb\") " pod="openshift-marketplace/certified-operators-5x2cw" Dec 05 15:14:34 crc kubenswrapper[4840]: I1205 15:14:34.345603 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/980c2743-6e13-40d7-8752-8f3fefc3fefb-catalog-content\") pod \"certified-operators-5x2cw\" (UID: \"980c2743-6e13-40d7-8752-8f3fefc3fefb\") " pod="openshift-marketplace/certified-operators-5x2cw" Dec 05 15:14:34 crc kubenswrapper[4840]: I1205 15:14:34.345628 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6c2vd\" (UniqueName: \"kubernetes.io/projected/980c2743-6e13-40d7-8752-8f3fefc3fefb-kube-api-access-6c2vd\") pod \"certified-operators-5x2cw\" (UID: \"980c2743-6e13-40d7-8752-8f3fefc3fefb\") " pod="openshift-marketplace/certified-operators-5x2cw" Dec 05 15:14:34 crc kubenswrapper[4840]: I1205 15:14:34.447107 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/980c2743-6e13-40d7-8752-8f3fefc3fefb-utilities\") pod \"certified-operators-5x2cw\" (UID: \"980c2743-6e13-40d7-8752-8f3fefc3fefb\") " pod="openshift-marketplace/certified-operators-5x2cw" Dec 05 15:14:34 crc kubenswrapper[4840]: I1205 15:14:34.447174 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/980c2743-6e13-40d7-8752-8f3fefc3fefb-catalog-content\") pod \"certified-operators-5x2cw\" (UID: \"980c2743-6e13-40d7-8752-8f3fefc3fefb\") " pod="openshift-marketplace/certified-operators-5x2cw" Dec 05 15:14:34 crc kubenswrapper[4840]: I1205 15:14:34.447204 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6c2vd\" (UniqueName: \"kubernetes.io/projected/980c2743-6e13-40d7-8752-8f3fefc3fefb-kube-api-access-6c2vd\") pod \"certified-operators-5x2cw\" (UID: \"980c2743-6e13-40d7-8752-8f3fefc3fefb\") " pod="openshift-marketplace/certified-operators-5x2cw" Dec 05 15:14:34 crc kubenswrapper[4840]: I1205 15:14:34.447763 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/980c2743-6e13-40d7-8752-8f3fefc3fefb-utilities\") pod \"certified-operators-5x2cw\" (UID: \"980c2743-6e13-40d7-8752-8f3fefc3fefb\") " pod="openshift-marketplace/certified-operators-5x2cw" Dec 05 15:14:34 crc kubenswrapper[4840]: I1205 15:14:34.447781 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/980c2743-6e13-40d7-8752-8f3fefc3fefb-catalog-content\") pod \"certified-operators-5x2cw\" (UID: \"980c2743-6e13-40d7-8752-8f3fefc3fefb\") " pod="openshift-marketplace/certified-operators-5x2cw" Dec 05 15:14:34 crc kubenswrapper[4840]: I1205 15:14:34.467032 4840 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-6c2vd\" (UniqueName: \"kubernetes.io/projected/980c2743-6e13-40d7-8752-8f3fefc3fefb-kube-api-access-6c2vd\") pod \"certified-operators-5x2cw\" (UID: \"980c2743-6e13-40d7-8752-8f3fefc3fefb\") " pod="openshift-marketplace/certified-operators-5x2cw" Dec 05 15:14:34 crc kubenswrapper[4840]: I1205 15:14:34.516564 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5x2cw" Dec 05 15:14:37 crc kubenswrapper[4840]: E1205 15:14:37.632184 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 15:14:37 crc kubenswrapper[4840]: E1205 15:14:37.632920 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zkm9f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-8jvnm_openstack-operators(31eda6ed-1dee-4670-a6d3-22871423db53): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:14:37 crc kubenswrapper[4840]: E1205 15:14:37.634303 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8jvnm" podUID="31eda6ed-1dee-4670-a6d3-22871423db53" Dec 05 15:14:38 crc kubenswrapper[4840]: E1205 15:14:38.072779 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 15:14:38 crc kubenswrapper[4840]: E1205 15:14:38.073131 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-trqkz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-spvpr_openstack-operators(f3d60ad8-fd18-4cf1-9ac2-05ce61d52f08): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:14:38 crc kubenswrapper[4840]: E1205 15:14:38.074582 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-spvpr" podUID="f3d60ad8-fd18-4cf1-9ac2-05ce61d52f08" Dec 05 15:14:38 crc kubenswrapper[4840]: E1205 15:14:38.089902 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 15:14:38 crc kubenswrapper[4840]: E1205 15:14:38.090078 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6h9hz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-6c548fd776-q2pp7_openstack-operators(48a2075e-30bb-41fb-a311-fb9b593182c6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:14:38 crc kubenswrapper[4840]: E1205 15:14:38.091447 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-q2pp7" podUID="48a2075e-30bb-41fb-a311-fb9b593182c6" Dec 05 15:14:38 crc kubenswrapper[4840]: E1205 15:14:38.133811 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 15:14:38 crc kubenswrapper[4840]: E1205 15:14:38.133996 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wfcp6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-77987cd8cd-fwvd2_openstack-operators(c15ac393-953d-45e9-b8dc-7212c6e2366b): 
ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:14:38 crc kubenswrapper[4840]: E1205 15:14:38.135184 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-fwvd2" podUID="c15ac393-953d-45e9-b8dc-7212c6e2366b" Dec 05 15:14:38 crc kubenswrapper[4840]: E1205 15:14:38.172786 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 15:14:38 crc kubenswrapper[4840]: E1205 15:14:38.172966 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4qbp2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-zjbwl_openstack-operators(6edb6d08-9885-457f-8642-ef77c64de97a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:14:38 crc kubenswrapper[4840]: E1205 15:14:38.174118 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-zjbwl" podUID="6edb6d08-9885-457f-8642-ef77c64de97a" Dec 05 15:14:38 crc kubenswrapper[4840]: E1205 15:14:38.189751 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 15:14:38 crc kubenswrapper[4840]: E1205 15:14:38.189850 4840 kuberuntime_manager.go:1274] "Unhandled 
Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cpgxf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-spqrr_openstack-operators(ae41b596-75b2-46ab-b95a-ef7b41f1e66b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:14:38 crc kubenswrapper[4840]: E1205 15:14:38.190981 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-spqrr" podUID="ae41b596-75b2-46ab-b95a-ef7b41f1e66b" Dec 05 15:14:38 crc kubenswrapper[4840]: E1205 15:14:38.216241 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 15:14:38 crc kubenswrapper[4840]: E1205 15:14:38.216423 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lqsfl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-7d9dfd778-7zmt7_openstack-operators(c79f46e0-5947-4b4a-b581-0e49736fb41f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:14:38 crc kubenswrapper[4840]: E1205 15:14:38.218706 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7zmt7" podUID="c79f46e0-5947-4b4a-b581-0e49736fb41f" Dec 05 15:14:38 crc kubenswrapper[4840]: E1205 15:14:38.387210 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:14cfad6ea2e7f7ecc4cb2aafceb9c61514b3d04b66668832d1e4ac3b19f1ab81" Dec 05 15:14:38 crc kubenswrapper[4840]: E1205 15:14:38.387691 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:14cfad6ea2e7f7ecc4cb2aafceb9c61514b3d04b66668832d1e4ac3b19f1ab81,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-baremetal-operator-agent:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ANSIBLEEE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_EVALUATOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-evaluator:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_NOTIFIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-notifier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_APACHE_IMAGE_URL_DEFAULT,Value:registry.redhat.io/ubi9/httpd-24:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_KEYSTONE_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-keystone-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_IPMI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-ipmi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_MYSQLD_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/mysqld-exporter:v0.15.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_NOTIFICATION_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_SGCORE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_BACKUP_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-backup:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_VOLUME_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-volume:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLOUDKITTY_API_IMAGE_URL_DEFAULT,Value:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLOUDKITTY_PROC_I
MAGE_URL_DEFAULT,Value:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-processor:current,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_BACKENDBIND9_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-backend-bind9:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_MDNS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-mdns:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_PRODUCER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-producer:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_UNBOUND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-unbound:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_FRR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-frr:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_ISCSID_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-iscsid:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_KEPLER_IMAGE_URL_DEFAULT,Value:quay.io/sustainable_computing_io/kepler:release-0.7.12,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_LOGROTATE_CROND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cron:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_MULTIPATHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-multipathd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_DHCP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-dhcp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_METADATA_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_OVN_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-ovn-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_SRIOV_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-sriov-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NODE_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/node-exporter:v1.5.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OVN_BGP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-bgp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_PODMAN_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/navidys/prometheus-podman-exporter:v1.10.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_GLANCE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_CFNAPI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api-cfn:current-podified,ValueFrom:nil,},EnvVar{Name:RELAT
ED_IMAGE_HEAT_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HORIZON_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_MEMCACHED_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_REDIS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-redis:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_INSPECTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-inspector:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_NEUTRON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-neutron-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PXE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-pxe:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PYTHON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/ironic-python-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KEYSTONE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KSM_IMAGE_URL_DEFAULT,Value:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SHARE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-share:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NET_UTILS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-netutils:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NEUTRON_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_NOVNC_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-novncproxy:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-an
telope-centos9/openstack-octavia-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-health-manager:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HOUSEKEEPING_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-housekeeping:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_RSYSLOG_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rsyslog:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_CLIENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_MUST_GATHER_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-must-gather:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_NETWORK_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OS_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/edpm-hardened-uefi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_OVS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NORTHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-northd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_SB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PLACEMENT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_RABBITMQ_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_ACCOUNT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-account:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-container:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_OBJECT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-object:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_PROXY_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_TEST_TEMPEST_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_APPLIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-applier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_DECISION_ENGINE_IMA
GE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-decision-engine:current-podified,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-g7w55,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt_openstack-operators(a18cf365-d0a8-4750-b11e-12d608ceb0e9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:14:38 crc kubenswrapper[4840]: I1205 15:14:38.440475 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-chg27" event={"ID":"53e9bbdb-0559-4053-b38c-395876f9d69f","Type":"ContainerStarted","Data":"0f079e769fe6b55fca68f160bcedc08a6677bdc1265403b92c3760bc052fb29a"} Dec 05 15:14:38 crc kubenswrapper[4840]: I1205 15:14:38.441922 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-xgflv" event={"ID":"6c27d60c-a8e1-4616-88b2-391876d4112d","Type":"ContainerStarted","Data":"747240bb0eed506bf8bc2595d1220dac2133a8c52fab8b88479d2bb18a6270ee"} Dec 05 15:14:39 crc kubenswrapper[4840]: E1205 15:14:39.137174 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/infra-operator@sha256:09a6d0613ee2d3c1c809fc36c22678458ac271e0da87c970aec0a5339f5423f7" Dec 05 15:14:39 crc kubenswrapper[4840]: E1205 15:14:39.137383 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:09a6d0613ee2d3c1c809fc36c22678458ac271e0da87c970aec0a5339f5423f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bhzjh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-57548d458d-gpcmz_openstack-operators(8e7074a0-bae6-49e7-8915-c4cb3242108d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:14:39 crc kubenswrapper[4840]: E1205 15:14:39.184025 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 15:14:39 crc kubenswrapper[4840]: E1205 15:14:39.184032 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 15:14:39 crc kubenswrapper[4840]: E1205 15:14:39.184401 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true 
--v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4csdn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-859b6ccc6-dcbzq_openstack-operators(94defa3c-b83c-44b9-83c0-e92bdf7944be): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 05 15:14:39 crc kubenswrapper[4840]: E1205 15:14:39.184530 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vh6rh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-7c79b5df47-n6chf_openstack-operators(683c5938-459a-4c60-bb98-8237f6ddc4f6): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 05 15:14:39 crc kubenswrapper[4840]: E1205 15:14:39.186653 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-dcbzq" 
podUID="94defa3c-b83c-44b9-83c0-e92bdf7944be" Dec 05 15:14:39 crc kubenswrapper[4840]: E1205 15:14:39.186672 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-n6chf" podUID="683c5938-459a-4c60-bb98-8237f6ddc4f6" Dec 05 15:14:39 crc kubenswrapper[4840]: E1205 15:14:39.186754 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 15:14:39 crc kubenswrapper[4840]: E1205 15:14:39.186885 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xqpkd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-5v888_openstack-operators(cdf9b744-368d-4c91-8ecf-6a5d983f3eb7): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 05 15:14:39 crc kubenswrapper[4840]: E1205 15:14:39.187574 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 15:14:39 crc kubenswrapper[4840]: E1205 15:14:39.187750 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ncmwh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-qz4dx_openstack-operators(546f9401-ad92-49f1-836a-8e240bbc2d61): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 05 15:14:39 crc kubenswrapper[4840]: E1205 15:14:39.188938 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-qz4dx" podUID="546f9401-ad92-49f1-836a-8e240bbc2d61" Dec 05 15:14:39 crc kubenswrapper[4840]: E1205 15:14:39.190192 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-5v888" podUID="cdf9b744-368d-4c91-8ecf-6a5d983f3eb7" Dec 05 15:14:39 crc kubenswrapper[4840]: I1205 15:14:39.469932 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-fvrzr" event={"ID":"919c5760-f9dd-4b40-9b91-ea3b11d13a26","Type":"ContainerStarted","Data":"59922f61fe00134515efc090521b3d46f4ac28c09004ecee49ab27d5da88cf18"} Dec 05 15:14:39 crc kubenswrapper[4840]: I1205 15:14:39.480671 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-dcbzq" Dec 05 15:14:39 crc kubenswrapper[4840]: I1205 15:14:39.480695 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-5v888" Dec 05 15:14:39 crc kubenswrapper[4840]: I1205 15:14:39.480705 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-n6chf" Dec 05 15:14:39 crc kubenswrapper[4840]: I1205 15:14:39.480713 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-qz4dx" Dec 05 15:14:39 crc kubenswrapper[4840]: I1205 15:14:39.485289 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-5v888" Dec 05 15:14:39 crc kubenswrapper[4840]: I1205 15:14:39.486073 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-qz4dx" Dec 05 15:14:39 crc kubenswrapper[4840]: I1205 
15:14:39.486186 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-dcbzq" Dec 05 15:14:39 crc kubenswrapper[4840]: I1205 15:14:39.488210 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-n6chf" Dec 05 15:14:39 crc kubenswrapper[4840]: I1205 15:14:39.629974 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5x2cw"] Dec 05 15:14:40 crc kubenswrapper[4840]: E1205 15:14:40.291146 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" podUID="a18cf365-d0a8-4750-b11e-12d608ceb0e9" Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.502137 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-q2pp7" event={"ID":"48a2075e-30bb-41fb-a311-fb9b593182c6","Type":"ContainerStarted","Data":"59deeaa49cce1ff4d23ce341f1d4a78ac64dc15fc2f3d94f83547c4e225d1255"} Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.518132 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" event={"ID":"a18cf365-d0a8-4750-b11e-12d608ceb0e9","Type":"ContainerStarted","Data":"6e58016e3104a7d33efa2e430ff0ad2037be6dc92a985d740f8f4e67168720ef"} Dec 05 15:14:40 crc kubenswrapper[4840]: E1205 15:14:40.520513 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:14cfad6ea2e7f7ecc4cb2aafceb9c61514b3d04b66668832d1e4ac3b19f1ab81\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" podUID="a18cf365-d0a8-4750-b11e-12d608ceb0e9" Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.536212 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-xgflv" event={"ID":"6c27d60c-a8e1-4616-88b2-391876d4112d","Type":"ContainerStarted","Data":"88747af3513956a1b241cd6a6986d6ef9939daa4c480c90d9ff7a0c25686be75"} Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.536472 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-xgflv" Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.551770 4840 generic.go:334] "Generic (PLEG): container finished" podID="df1dca25-320c-4c19-a6c7-b113a3ec4de4" containerID="90d95b9abca22f806287f13d5748ed1c79945684b0574dad650e2f733b54a05a" exitCode=0 Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.551894 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8jdn" event={"ID":"df1dca25-320c-4c19-a6c7-b113a3ec4de4","Type":"ContainerDied","Data":"90d95b9abca22f806287f13d5748ed1c79945684b0574dad650e2f733b54a05a"} Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.573791 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x2cw" 
event={"ID":"980c2743-6e13-40d7-8752-8f3fefc3fefb","Type":"ContainerStarted","Data":"ca23fa2f0ae720a8b0e15f4a88a88ad0c9085bda59ac8ea72db9a9e7453687a2"} Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.573832 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x2cw" event={"ID":"980c2743-6e13-40d7-8752-8f3fefc3fefb","Type":"ContainerStarted","Data":"163dad06520a6217966cf7bd510c999da9d1da13bad01f487db71414f381c788"} Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.581677 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-hvc5g" event={"ID":"3b263984-5a9b-45eb-886b-b8209ada6a7a","Type":"ContainerStarted","Data":"886c0a4e00c94994c7db8462fca7ab2874efc5e241d77b6b5b7bf1575a9bbb96"} Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.582923 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-hvc5g" Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.587393 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-hvc5g" Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.587430 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-xgflv" podStartSLOduration=5.66724183 podStartE2EDuration="57.587350205s" podCreationTimestamp="2025-12-05 15:13:43 +0000 UTC" firstStartedPulling="2025-12-05 15:13:47.432437714 +0000 UTC m=+905.773500328" lastFinishedPulling="2025-12-05 15:14:39.352546089 +0000 UTC m=+957.693608703" observedRunningTime="2025-12-05 15:14:40.574848751 +0000 UTC m=+958.915911365" watchObservedRunningTime="2025-12-05 15:14:40.587350205 +0000 UTC m=+958.928412819" Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.591025 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8jvnm" event={"ID":"31eda6ed-1dee-4670-a6d3-22871423db53","Type":"ContainerStarted","Data":"cc33a89a97326710d2bdffd94b544be78de08ba1a90c524f99b1af4559c3eea9"} Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.607460 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fmbzp" event={"ID":"ff2da961-d2c6-486f-87bf-2394ee00a5a1","Type":"ContainerStarted","Data":"dcf359899ffd0affc5526d83ca754f9b4e63df8fa90124b0e5d671d123f4f172"} Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.608330 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fmbzp" Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.620263 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-dcbzq" event={"ID":"94defa3c-b83c-44b9-83c0-e92bdf7944be","Type":"ContainerStarted","Data":"459b931d4c45eae1be25497c6a1c71d707ca9fcb45135f4aad813de39c50dadd"} Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.620531 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fmbzp" Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.626128 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-chg27" Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.627363 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-chg27" event={"ID":"53e9bbdb-0559-4053-b38c-395876f9d69f","Type":"ContainerStarted","Data":"47149dc8d2d1be78071468ef37d53b4cc9a437a5ed8b5d659082ffdce136df0c"} Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.647173 4840 generic.go:334] "Generic (PLEG): container finished" podID="6da7f018-2512-4ff2-8824-f90c5a0d5abf" containerID="58c8b4884b11a4bd44c7330ec91321987923f246df1d8e5e7e9da4ba29dac6c1" exitCode=0 Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.647348 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scqgw" event={"ID":"6da7f018-2512-4ff2-8824-f90c5a0d5abf","Type":"ContainerDied","Data":"58c8b4884b11a4bd44c7330ec91321987923f246df1d8e5e7e9da4ba29dac6c1"} Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.660065 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-r2br9" event={"ID":"e1dca245-f390-4f32-8683-eea98ad3fb45","Type":"ContainerStarted","Data":"5ae6374976fa77513b815fa6cbcc7022e9fe53c62caa8c01e2f529d6f2e2bca6"} Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.852688 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-r2br9" podStartSLOduration=24.7683438 podStartE2EDuration="55.852672458s" podCreationTimestamp="2025-12-05 15:13:45 +0000 UTC" firstStartedPulling="2025-12-05 15:13:47.562887618 +0000 UTC m=+905.903950232" lastFinishedPulling="2025-12-05 15:14:18.647216266 +0000 UTC m=+936.988278890" observedRunningTime="2025-12-05 15:14:40.851065263 +0000 UTC m=+959.192127877" watchObservedRunningTime="2025-12-05 15:14:40.852672458 +0000 UTC m=+959.193735072" Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.878766 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-dcbzq" podStartSLOduration=38.307967551 podStartE2EDuration="57.878751686s" podCreationTimestamp="2025-12-05 15:13:43 +0000 UTC" firstStartedPulling="2025-12-05 15:13:46.919358134 +0000 UTC m=+905.260420748" lastFinishedPulling="2025-12-05 15:14:06.490142269 +0000 UTC m=+924.831204883" observedRunningTime="2025-12-05 15:14:40.877687046 +0000 UTC m=+959.218749660" watchObservedRunningTime="2025-12-05 15:14:40.878751686 +0000 UTC m=+959.219814300" Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.921189 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-chg27" podStartSLOduration=5.016376736 podStartE2EDuration="56.921169408s" podCreationTimestamp="2025-12-05 15:13:44 +0000 UTC" firstStartedPulling="2025-12-05 15:13:47.441261863 +0000 UTC m=+905.782324477" lastFinishedPulling="2025-12-05 15:14:39.346054535 +0000 UTC m=+957.687117149" observedRunningTime="2025-12-05 15:14:40.915198419 +0000 UTC m=+959.256261033" watchObservedRunningTime="2025-12-05 15:14:40.921169408 +0000 UTC m=+959.262232022" Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.942253 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fmbzp" 
podStartSLOduration=5.697533997 podStartE2EDuration="57.942232084s" podCreationTimestamp="2025-12-05 15:13:43 +0000 UTC" firstStartedPulling="2025-12-05 15:13:47.101365148 +0000 UTC m=+905.442427762" lastFinishedPulling="2025-12-05 15:14:39.346063235 +0000 UTC m=+957.687125849" observedRunningTime="2025-12-05 15:14:40.939070865 +0000 UTC m=+959.280133479" watchObservedRunningTime="2025-12-05 15:14:40.942232084 +0000 UTC m=+959.283294688" Dec 05 15:14:40 crc kubenswrapper[4840]: I1205 15:14:40.992186 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-hvc5g" podStartSLOduration=5.996716859 podStartE2EDuration="57.992158488s" podCreationTimestamp="2025-12-05 15:13:43 +0000 UTC" firstStartedPulling="2025-12-05 15:13:47.357739468 +0000 UTC m=+905.698802082" lastFinishedPulling="2025-12-05 15:14:39.353181097 +0000 UTC m=+957.694243711" observedRunningTime="2025-12-05 15:14:40.971597316 +0000 UTC m=+959.312659930" watchObservedRunningTime="2025-12-05 15:14:40.992158488 +0000 UTC m=+959.333221132" Dec 05 15:14:41 crc kubenswrapper[4840]: E1205 15:14:41.535212 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz" podUID="8e7074a0-bae6-49e7-8915-c4cb3242108d" Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.023365 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-5v888" event={"ID":"cdf9b744-368d-4c91-8ecf-6a5d983f3eb7","Type":"ContainerStarted","Data":"27738ec766760c373d56e6d27e8bc4abde11554b624d0d52c02bc239edae3f28"} Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.028601 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7zmt7" event={"ID":"c79f46e0-5947-4b4a-b581-0e49736fb41f","Type":"ContainerStarted","Data":"6e1901c2c2670c344dbbb0d1be9c35d718bf58ddcb838fbd6bcb7835242972c3"} Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.030175 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-zjbwl" event={"ID":"6edb6d08-9885-457f-8642-ef77c64de97a","Type":"ContainerStarted","Data":"563c0f532ce718c5c4b87a3030138b6afaff0dcde0e9a755d278b6e6fc8b96b2"} Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.031446 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-qz4dx" event={"ID":"546f9401-ad92-49f1-836a-8e240bbc2d61","Type":"ContainerStarted","Data":"fed74b19fceed74b482c0a01f803122c2d56b0357c4cade783ba25ef84ceaf8b"} Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.038472 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-q2pp7" event={"ID":"48a2075e-30bb-41fb-a311-fb9b593182c6","Type":"ContainerStarted","Data":"e63a22a8545723d9b22990715a03cb740e9ae30f5009b44f4604b547f1c0c1e2"} Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.039028 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-q2pp7" Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.040132 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-n6chf" event={"ID":"683c5938-459a-4c60-bb98-8237f6ddc4f6","Type":"ContainerStarted","Data":"bde59fb6d9beaca44287a5c285d0b64799572c98c478c63f4b25d25c4ce10a60"} Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.041353 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz" event={"ID":"8e7074a0-bae6-49e7-8915-c4cb3242108d","Type":"ContainerStarted","Data":"7b33634d67c8e5c6eea0a09a93894dbeeffc3c8d652770f057543e423cc9d5b0"} Dec 05 15:14:42 crc kubenswrapper[4840]: E1205 15:14:42.042286 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:09a6d0613ee2d3c1c809fc36c22678458ac271e0da87c970aec0a5339f5423f7\\\"\"" pod="openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz" podUID="8e7074a0-bae6-49e7-8915-c4cb3242108d" Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.062347 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-8vw7g" event={"ID":"9f897b64-3aeb-44c6-a340-9e0082876e93","Type":"ContainerStarted","Data":"ead6b5cb8f78e0a710ac5d45cf549646c0199bd1c3b2bec5d5a330b85a6d9c1b"} Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.063018 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-8vw7g" Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.065619 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-fwvd2" event={"ID":"c15ac393-953d-45e9-b8dc-7212c6e2366b","Type":"ContainerStarted","Data":"25a833363de824fb0c3450fe03a9236777027b656757bce51054253e2cb6b3fc"} Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.079977 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-5v888" podStartSLOduration=39.180373096 podStartE2EDuration="59.079958502s" podCreationTimestamp="2025-12-05 15:13:43 +0000 UTC" firstStartedPulling="2025-12-05 15:13:47.223099306 +0000 UTC m=+905.564161920" lastFinishedPulling="2025-12-05 15:14:07.122684662 +0000 UTC m=+925.463747326" observedRunningTime="2025-12-05 15:14:42.075266269 +0000 UTC m=+960.416328883" watchObservedRunningTime="2025-12-05 15:14:42.079958502 +0000 UTC m=+960.421021106" Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.104600 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-g5t4m" event={"ID":"df20a713-1c9f-4738-8401-ddff0dcf0c38","Type":"ContainerStarted","Data":"e692a87549e9aa6f55f17d0bdb674831aed23b291036c203cacc26b8cbd1af9c"} Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.104641 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-spqrr" event={"ID":"ae41b596-75b2-46ab-b95a-ef7b41f1e66b","Type":"ContainerStarted","Data":"fb809065f31b0f613df5a573d8ea0a1e394cd2ff2aa6fb5ebfd8c79365fb90a1"} Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.104659 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-g5t4m" Dec 05 15:14:42 crc 
kubenswrapper[4840]: I1205 15:14:42.117173 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8jvnm" event={"ID":"31eda6ed-1dee-4670-a6d3-22871423db53","Type":"ContainerStarted","Data":"473af86a45ee4bf6f38734e82d36b93dabe411e62e64bc8e1158329d840226b4"} Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.117966 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8jvnm" Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.118658 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-q2pp7" podStartSLOduration=7.013126762 podStartE2EDuration="59.118639298s" podCreationTimestamp="2025-12-05 15:13:43 +0000 UTC" firstStartedPulling="2025-12-05 15:13:47.225138013 +0000 UTC m=+905.566200627" lastFinishedPulling="2025-12-05 15:14:39.330650549 +0000 UTC m=+957.671713163" observedRunningTime="2025-12-05 15:14:42.115838698 +0000 UTC m=+960.456901312" watchObservedRunningTime="2025-12-05 15:14:42.118639298 +0000 UTC m=+960.459701902" Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.133749 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-fvrzr" event={"ID":"919c5760-f9dd-4b40-9b91-ea3b11d13a26","Type":"ContainerStarted","Data":"caaec3e81ae5050afe7e4ad89b91ee15c46821699c419fd1e3d93b5f834a6da4"} Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.135460 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-fvrzr" Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.139259 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-8vw7g" Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.168314 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-n6chf" podStartSLOduration=39.281866761 podStartE2EDuration="59.168293424s" podCreationTimestamp="2025-12-05 15:13:43 +0000 UTC" firstStartedPulling="2025-12-05 15:13:47.227979424 +0000 UTC m=+905.569042038" lastFinishedPulling="2025-12-05 15:14:07.114406087 +0000 UTC m=+925.455468701" observedRunningTime="2025-12-05 15:14:42.164577898 +0000 UTC m=+960.505640512" watchObservedRunningTime="2025-12-05 15:14:42.168293424 +0000 UTC m=+960.509356038" Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.168967 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-q5ksl" event={"ID":"387ba01a-b726-4c0c-b3ab-160be43d9587","Type":"ContainerStarted","Data":"fd8f642b5c492c4e417931a1498fceb4e019a8bb99c330a865c0a37558de33aa"} Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.170991 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-q5ksl" Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.177375 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-q5ksl" Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.408743 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-spvpr" event={"ID":"f3d60ad8-fd18-4cf1-9ac2-05ce61d52f08","Type":"ContainerStarted","Data":"cd3aabc03452bcd6ae7093ea3ffd7e19801c1959cb6ceef80c346f0d90113520"} Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.424743 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-g5t4m" Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.431406 4840 generic.go:334] "Generic (PLEG): container finished" podID="980c2743-6e13-40d7-8752-8f3fefc3fefb" containerID="ca23fa2f0ae720a8b0e15f4a88a88ad0c9085bda59ac8ea72db9a9e7453687a2" exitCode=0 Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.431494 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x2cw" event={"ID":"980c2743-6e13-40d7-8752-8f3fefc3fefb","Type":"ContainerDied","Data":"ca23fa2f0ae720a8b0e15f4a88a88ad0c9085bda59ac8ea72db9a9e7453687a2"} Dec 05 15:14:42 crc kubenswrapper[4840]: E1205 15:14:42.438754 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:14cfad6ea2e7f7ecc4cb2aafceb9c61514b3d04b66668832d1e4ac3b19f1ab81\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" podUID="a18cf365-d0a8-4750-b11e-12d608ceb0e9" Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.440576 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-chg27" Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.444703 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-xgflv" Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.587559 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-qz4dx" podStartSLOduration=39.071266736 podStartE2EDuration="59.587539526s" podCreationTimestamp="2025-12-05 15:13:43 +0000 UTC" firstStartedPulling="2025-12-05 15:13:47.113408679 +0000 UTC m=+905.454471293" lastFinishedPulling="2025-12-05 15:14:07.629681479 +0000 UTC m=+925.970744083" observedRunningTime="2025-12-05 15:14:42.475264236 +0000 UTC m=+960.816326850" watchObservedRunningTime="2025-12-05 15:14:42.587539526 +0000 UTC m=+960.928602140" Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.591016 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-g5t4m" podStartSLOduration=5.899611028 podStartE2EDuration="58.591007024s" podCreationTimestamp="2025-12-05 15:13:44 +0000 UTC" firstStartedPulling="2025-12-05 15:13:47.444791723 +0000 UTC m=+905.785854337" lastFinishedPulling="2025-12-05 15:14:40.136187719 +0000 UTC m=+958.477250333" observedRunningTime="2025-12-05 15:14:42.590262503 +0000 UTC m=+960.931325117" watchObservedRunningTime="2025-12-05 15:14:42.591007024 +0000 UTC m=+960.932069638" Dec 05 15:14:42 crc kubenswrapper[4840]: I1205 15:14:42.658353 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-8vw7g" podStartSLOduration=7.001725889 podStartE2EDuration="59.65833349s" 
podCreationTimestamp="2025-12-05 15:13:43 +0000 UTC" firstStartedPulling="2025-12-05 15:13:47.054206723 +0000 UTC m=+905.395269337" lastFinishedPulling="2025-12-05 15:14:39.710814324 +0000 UTC m=+958.051876938" observedRunningTime="2025-12-05 15:14:42.657179228 +0000 UTC m=+960.998241852" watchObservedRunningTime="2025-12-05 15:14:42.65833349 +0000 UTC m=+960.999396104" Dec 05 15:14:43 crc kubenswrapper[4840]: I1205 15:14:43.215532 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-fvrzr" podStartSLOduration=28.2347454 podStartE2EDuration="59.215512888s" podCreationTimestamp="2025-12-05 15:13:44 +0000 UTC" firstStartedPulling="2025-12-05 15:13:47.445180264 +0000 UTC m=+905.786242878" lastFinishedPulling="2025-12-05 15:14:18.425947752 +0000 UTC m=+936.767010366" observedRunningTime="2025-12-05 15:14:43.140203496 +0000 UTC m=+961.481266100" watchObservedRunningTime="2025-12-05 15:14:43.215512888 +0000 UTC m=+961.556575502" Dec 05 15:14:43 crc kubenswrapper[4840]: I1205 15:14:43.256056 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8jvnm" podStartSLOduration=7.01491519 podStartE2EDuration="59.256039346s" podCreationTimestamp="2025-12-05 15:13:44 +0000 UTC" firstStartedPulling="2025-12-05 15:13:47.331604158 +0000 UTC m=+905.672666772" lastFinishedPulling="2025-12-05 15:14:39.572728314 +0000 UTC m=+957.913790928" observedRunningTime="2025-12-05 15:14:43.211889706 +0000 UTC m=+961.552952320" watchObservedRunningTime="2025-12-05 15:14:43.256039346 +0000 UTC m=+961.597101960" Dec 05 15:14:43 crc kubenswrapper[4840]: I1205 15:14:43.505327 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-zjbwl" event={"ID":"6edb6d08-9885-457f-8642-ef77c64de97a","Type":"ContainerStarted","Data":"b76e3c0fdf298eec5b2257793f49d64eb81595e64a29798c833fb0c209b31eca"} Dec 05 15:14:43 crc kubenswrapper[4840]: I1205 15:14:43.519332 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-q5ksl" podStartSLOduration=7.358762936 podStartE2EDuration="59.51930804s" podCreationTimestamp="2025-12-05 15:13:44 +0000 UTC" firstStartedPulling="2025-12-05 15:13:47.353145228 +0000 UTC m=+905.694207842" lastFinishedPulling="2025-12-05 15:14:39.513690332 +0000 UTC m=+957.854752946" observedRunningTime="2025-12-05 15:14:43.488691344 +0000 UTC m=+961.829753948" watchObservedRunningTime="2025-12-05 15:14:43.51930804 +0000 UTC m=+961.860370654" Dec 05 15:14:43 crc kubenswrapper[4840]: I1205 15:14:43.521146 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8jdn" event={"ID":"df1dca25-320c-4c19-a6c7-b113a3ec4de4","Type":"ContainerStarted","Data":"d33c0083005777325ad317c594e558d1f92771bfaaf36f68077ec2424f842a6b"} Dec 05 15:14:43 crc kubenswrapper[4840]: I1205 15:14:43.542905 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-spqrr" event={"ID":"ae41b596-75b2-46ab-b95a-ef7b41f1e66b","Type":"ContainerStarted","Data":"40f1e332877c06eeaef38c860f19c181d53245e85ffef1e09ad27cfb4539c73c"} Dec 05 15:14:43 crc kubenswrapper[4840]: I1205 15:14:43.543850 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/nova-operator-controller-manager-697bc559fc-spqrr" Dec 05 15:14:43 crc kubenswrapper[4840]: I1205 15:14:43.553763 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7zmt7" event={"ID":"c79f46e0-5947-4b4a-b581-0e49736fb41f","Type":"ContainerStarted","Data":"5f3ce8d4f18ea6d3820dd89e0f5d025a24f8504cca84805223e139ac1a2703d5"} Dec 05 15:14:43 crc kubenswrapper[4840]: I1205 15:14:43.554692 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7zmt7" Dec 05 15:14:43 crc kubenswrapper[4840]: I1205 15:14:43.577046 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-spvpr" event={"ID":"f3d60ad8-fd18-4cf1-9ac2-05ce61d52f08","Type":"ContainerStarted","Data":"1d90f3991b25e9764df6b03b7c46ccc7d456b0b8a550f576d2597c4ead2f1a37"} Dec 05 15:14:43 crc kubenswrapper[4840]: I1205 15:14:43.577967 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-spvpr" Dec 05 15:14:43 crc kubenswrapper[4840]: I1205 15:14:43.593158 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x2cw" event={"ID":"980c2743-6e13-40d7-8752-8f3fefc3fefb","Type":"ContainerStarted","Data":"4fddf69981eb33f5caa2aab31fea8cdacb07098615863b13c1c218b630c3afa8"} Dec 05 15:14:43 crc kubenswrapper[4840]: I1205 15:14:43.604731 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-fwvd2" event={"ID":"c15ac393-953d-45e9-b8dc-7212c6e2366b","Type":"ContainerStarted","Data":"bd961bd4c3eb0afcd97a17ca6f1a836281cee058767949f3ce3a064d74410b76"} Dec 05 15:14:43 crc kubenswrapper[4840]: I1205 15:14:43.604775 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-fwvd2" Dec 05 15:14:43 crc kubenswrapper[4840]: E1205 15:14:43.608197 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:09a6d0613ee2d3c1c809fc36c22678458ac271e0da87c970aec0a5339f5423f7\\\"\"" pod="openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz" podUID="8e7074a0-bae6-49e7-8915-c4cb3242108d" Dec 05 15:14:43 crc kubenswrapper[4840]: I1205 15:14:43.753610 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-fwvd2" podStartSLOduration=7.871683314 podStartE2EDuration="1m0.753590565s" podCreationTimestamp="2025-12-05 15:13:43 +0000 UTC" firstStartedPulling="2025-12-05 15:13:46.913663923 +0000 UTC m=+905.254726537" lastFinishedPulling="2025-12-05 15:14:39.795571174 +0000 UTC m=+958.136633788" observedRunningTime="2025-12-05 15:14:43.747018508 +0000 UTC m=+962.088081122" watchObservedRunningTime="2025-12-05 15:14:43.753590565 +0000 UTC m=+962.094653179" Dec 05 15:14:44 crc kubenswrapper[4840]: I1205 15:14:44.305527 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-spqrr" podStartSLOduration=8.789575896 podStartE2EDuration="1m1.305510404s" podCreationTimestamp="2025-12-05 15:13:43 +0000 UTC" 
firstStartedPulling="2025-12-05 15:13:47.347703634 +0000 UTC m=+905.688766248" lastFinishedPulling="2025-12-05 15:14:39.863638122 +0000 UTC m=+958.204700756" observedRunningTime="2025-12-05 15:14:44.300303196 +0000 UTC m=+962.641365810" watchObservedRunningTime="2025-12-05 15:14:44.305510404 +0000 UTC m=+962.646573018" Dec 05 15:14:44 crc kubenswrapper[4840]: I1205 15:14:44.306710 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7zmt7" podStartSLOduration=8.587131942 podStartE2EDuration="1m1.306703997s" podCreationTimestamp="2025-12-05 15:13:43 +0000 UTC" firstStartedPulling="2025-12-05 15:13:46.852337486 +0000 UTC m=+905.193400100" lastFinishedPulling="2025-12-05 15:14:39.571909541 +0000 UTC m=+957.912972155" observedRunningTime="2025-12-05 15:14:44.188589963 +0000 UTC m=+962.529652577" watchObservedRunningTime="2025-12-05 15:14:44.306703997 +0000 UTC m=+962.647766611" Dec 05 15:14:44 crc kubenswrapper[4840]: I1205 15:14:44.351248 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-spvpr" podStartSLOduration=8.163641578 podStartE2EDuration="1m0.351228128s" podCreationTimestamp="2025-12-05 15:13:44 +0000 UTC" firstStartedPulling="2025-12-05 15:13:47.544646461 +0000 UTC m=+905.885709075" lastFinishedPulling="2025-12-05 15:14:39.732233011 +0000 UTC m=+958.073295625" observedRunningTime="2025-12-05 15:14:44.347239295 +0000 UTC m=+962.688301909" watchObservedRunningTime="2025-12-05 15:14:44.351228128 +0000 UTC m=+962.692290742" Dec 05 15:14:44 crc kubenswrapper[4840]: I1205 15:14:44.387697 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-q2pp7" Dec 05 15:14:44 crc kubenswrapper[4840]: I1205 15:14:44.824029 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-zjbwl" podStartSLOduration=9.370572818 podStartE2EDuration="1m1.824013026s" podCreationTimestamp="2025-12-05 15:13:43 +0000 UTC" firstStartedPulling="2025-12-05 15:13:47.21267932 +0000 UTC m=+905.553741934" lastFinishedPulling="2025-12-05 15:14:39.666119528 +0000 UTC m=+958.007182142" observedRunningTime="2025-12-05 15:14:44.819013615 +0000 UTC m=+963.160076239" watchObservedRunningTime="2025-12-05 15:14:44.824013026 +0000 UTC m=+963.165075640" Dec 05 15:14:44 crc kubenswrapper[4840]: I1205 15:14:44.975153 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-fvrzr" Dec 05 15:14:45 crc kubenswrapper[4840]: I1205 15:14:45.258967 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-spvpr" Dec 05 15:14:45 crc kubenswrapper[4840]: I1205 15:14:45.495842 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8jvnm" Dec 05 15:14:45 crc kubenswrapper[4840]: I1205 15:14:45.954779 4840 generic.go:334] "Generic (PLEG): container finished" podID="980c2743-6e13-40d7-8752-8f3fefc3fefb" containerID="4fddf69981eb33f5caa2aab31fea8cdacb07098615863b13c1c218b630c3afa8" exitCode=0 Dec 05 15:14:45 crc kubenswrapper[4840]: I1205 15:14:45.954827 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-5x2cw" event={"ID":"980c2743-6e13-40d7-8752-8f3fefc3fefb","Type":"ContainerDied","Data":"4fddf69981eb33f5caa2aab31fea8cdacb07098615863b13c1c218b630c3afa8"} Dec 05 15:14:45 crc kubenswrapper[4840]: I1205 15:14:45.971001 4840 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 15:14:46 crc kubenswrapper[4840]: I1205 15:14:46.969089 4840 generic.go:334] "Generic (PLEG): container finished" podID="df1dca25-320c-4c19-a6c7-b113a3ec4de4" containerID="d33c0083005777325ad317c594e558d1f92771bfaaf36f68077ec2424f842a6b" exitCode=0 Dec 05 15:14:46 crc kubenswrapper[4840]: I1205 15:14:46.969177 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8jdn" event={"ID":"df1dca25-320c-4c19-a6c7-b113a3ec4de4","Type":"ContainerDied","Data":"d33c0083005777325ad317c594e558d1f92771bfaaf36f68077ec2424f842a6b"} Dec 05 15:14:48 crc kubenswrapper[4840]: I1205 15:14:48.191729 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x2cw" event={"ID":"980c2743-6e13-40d7-8752-8f3fefc3fefb","Type":"ContainerStarted","Data":"246e86458d6cfb28ba869ab3cc2876e0e302f9d05b76dd44d709e03fc11bb785"} Dec 05 15:14:48 crc kubenswrapper[4840]: I1205 15:14:48.193674 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8jdn" event={"ID":"df1dca25-320c-4c19-a6c7-b113a3ec4de4","Type":"ContainerStarted","Data":"e4f66e0a94c42cfc6e890bd077df0955fe9c76053fefecde909974a60c30d8dc"} Dec 05 15:14:48 crc kubenswrapper[4840]: I1205 15:14:48.218961 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5x2cw" podStartSLOduration=8.02131896 podStartE2EDuration="14.218939571s" podCreationTimestamp="2025-12-05 15:14:34 +0000 UTC" firstStartedPulling="2025-12-05 15:14:40.579297247 +0000 UTC m=+958.920359861" lastFinishedPulling="2025-12-05 15:14:46.776917858 +0000 UTC m=+965.117980472" observedRunningTime="2025-12-05 15:14:48.216345598 +0000 UTC m=+966.557408212" watchObservedRunningTime="2025-12-05 15:14:48.218939571 +0000 UTC m=+966.560002185" Dec 05 15:14:48 crc kubenswrapper[4840]: I1205 15:14:48.239333 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-c8jdn" podStartSLOduration=30.380811093 podStartE2EDuration="37.239316188s" podCreationTimestamp="2025-12-05 15:14:11 +0000 UTC" firstStartedPulling="2025-12-05 15:14:40.555125842 +0000 UTC m=+958.896188456" lastFinishedPulling="2025-12-05 15:14:47.413630937 +0000 UTC m=+965.754693551" observedRunningTime="2025-12-05 15:14:48.236129748 +0000 UTC m=+966.577192362" watchObservedRunningTime="2025-12-05 15:14:48.239316188 +0000 UTC m=+966.580378802" Dec 05 15:14:49 crc kubenswrapper[4840]: I1205 15:14:49.471745 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:14:49 crc kubenswrapper[4840]: I1205 15:14:49.472260 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial 
tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:14:52 crc kubenswrapper[4840]: I1205 15:14:52.117199 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-c8jdn" Dec 05 15:14:52 crc kubenswrapper[4840]: I1205 15:14:52.117500 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-c8jdn" Dec 05 15:14:52 crc kubenswrapper[4840]: I1205 15:14:52.198234 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-c8jdn" Dec 05 15:14:52 crc kubenswrapper[4840]: I1205 15:14:52.232969 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scqgw" event={"ID":"6da7f018-2512-4ff2-8824-f90c5a0d5abf","Type":"ContainerDied","Data":"fab1e984ab031cf37f1d92a8aaa18bd42e111595c8cabea4f30a338cfce7f037"} Dec 05 15:14:52 crc kubenswrapper[4840]: I1205 15:14:52.233292 4840 generic.go:334] "Generic (PLEG): container finished" podID="6da7f018-2512-4ff2-8824-f90c5a0d5abf" containerID="fab1e984ab031cf37f1d92a8aaa18bd42e111595c8cabea4f30a338cfce7f037" exitCode=0 Dec 05 15:14:52 crc kubenswrapper[4840]: I1205 15:14:52.290421 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-c8jdn" Dec 05 15:14:53 crc kubenswrapper[4840]: I1205 15:14:53.444807 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c8jdn"] Dec 05 15:14:53 crc kubenswrapper[4840]: I1205 15:14:53.712902 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7zmt7" Dec 05 15:14:53 crc kubenswrapper[4840]: I1205 15:14:53.762893 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-fwvd2" Dec 05 15:14:54 crc kubenswrapper[4840]: I1205 15:14:54.190062 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-zjbwl" Dec 05 15:14:54 crc kubenswrapper[4840]: I1205 15:14:54.192713 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-zjbwl" Dec 05 15:14:54 crc kubenswrapper[4840]: I1205 15:14:54.248980 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scqgw" event={"ID":"6da7f018-2512-4ff2-8824-f90c5a0d5abf","Type":"ContainerStarted","Data":"0aba9effc92b004586ef1ce74a1cb0629316156b124c5de20a0c8e0d692bd2c4"} Dec 05 15:14:54 crc kubenswrapper[4840]: I1205 15:14:54.249578 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-c8jdn" podUID="df1dca25-320c-4c19-a6c7-b113a3ec4de4" containerName="registry-server" containerID="cri-o://e4f66e0a94c42cfc6e890bd077df0955fe9c76053fefecde909974a60c30d8dc" gracePeriod=2 Dec 05 15:14:54 crc kubenswrapper[4840]: I1205 15:14:54.265505 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-scqgw" podStartSLOduration=25.073298698 podStartE2EDuration="39.265484634s" podCreationTimestamp="2025-12-05 15:14:15 +0000 UTC" firstStartedPulling="2025-12-05 15:14:39.513693422 +0000 UTC m=+957.854756036" lastFinishedPulling="2025-12-05 15:14:53.705879358 
+0000 UTC m=+972.046941972" observedRunningTime="2025-12-05 15:14:54.264066584 +0000 UTC m=+972.605129198" watchObservedRunningTime="2025-12-05 15:14:54.265484634 +0000 UTC m=+972.606547258" Dec 05 15:14:54 crc kubenswrapper[4840]: I1205 15:14:54.517425 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5x2cw" Dec 05 15:14:54 crc kubenswrapper[4840]: I1205 15:14:54.517494 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5x2cw" Dec 05 15:14:54 crc kubenswrapper[4840]: I1205 15:14:54.563598 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5x2cw" Dec 05 15:14:54 crc kubenswrapper[4840]: I1205 15:14:54.564725 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-spqrr" Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.156016 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c8jdn" Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.257976 4840 generic.go:334] "Generic (PLEG): container finished" podID="df1dca25-320c-4c19-a6c7-b113a3ec4de4" containerID="e4f66e0a94c42cfc6e890bd077df0955fe9c76053fefecde909974a60c30d8dc" exitCode=0 Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.258077 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c8jdn" Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.258057 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8jdn" event={"ID":"df1dca25-320c-4c19-a6c7-b113a3ec4de4","Type":"ContainerDied","Data":"e4f66e0a94c42cfc6e890bd077df0955fe9c76053fefecde909974a60c30d8dc"} Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.258154 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8jdn" event={"ID":"df1dca25-320c-4c19-a6c7-b113a3ec4de4","Type":"ContainerDied","Data":"6baf5c0ff9f1008f1bce6b0c111f7fe695f3ef874820a4c4e3dc4ac0ccdfc5da"} Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.258190 4840 scope.go:117] "RemoveContainer" containerID="e4f66e0a94c42cfc6e890bd077df0955fe9c76053fefecde909974a60c30d8dc" Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.272056 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df1dca25-320c-4c19-a6c7-b113a3ec4de4-utilities\") pod \"df1dca25-320c-4c19-a6c7-b113a3ec4de4\" (UID: \"df1dca25-320c-4c19-a6c7-b113a3ec4de4\") " Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.272133 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df1dca25-320c-4c19-a6c7-b113a3ec4de4-catalog-content\") pod \"df1dca25-320c-4c19-a6c7-b113a3ec4de4\" (UID: \"df1dca25-320c-4c19-a6c7-b113a3ec4de4\") " Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.272191 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6c8l9\" (UniqueName: \"kubernetes.io/projected/df1dca25-320c-4c19-a6c7-b113a3ec4de4-kube-api-access-6c8l9\") pod \"df1dca25-320c-4c19-a6c7-b113a3ec4de4\" (UID: \"df1dca25-320c-4c19-a6c7-b113a3ec4de4\") " Dec 05 15:14:55 crc 
kubenswrapper[4840]: I1205 15:14:55.273349 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df1dca25-320c-4c19-a6c7-b113a3ec4de4-utilities" (OuterVolumeSpecName: "utilities") pod "df1dca25-320c-4c19-a6c7-b113a3ec4de4" (UID: "df1dca25-320c-4c19-a6c7-b113a3ec4de4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.273856 4840 scope.go:117] "RemoveContainer" containerID="d33c0083005777325ad317c594e558d1f92771bfaaf36f68077ec2424f842a6b" Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.279653 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df1dca25-320c-4c19-a6c7-b113a3ec4de4-kube-api-access-6c8l9" (OuterVolumeSpecName: "kube-api-access-6c8l9") pod "df1dca25-320c-4c19-a6c7-b113a3ec4de4" (UID: "df1dca25-320c-4c19-a6c7-b113a3ec4de4"). InnerVolumeSpecName "kube-api-access-6c8l9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.312729 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5x2cw" Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.325224 4840 scope.go:117] "RemoveContainer" containerID="90d95b9abca22f806287f13d5748ed1c79945684b0574dad650e2f733b54a05a" Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.328082 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df1dca25-320c-4c19-a6c7-b113a3ec4de4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "df1dca25-320c-4c19-a6c7-b113a3ec4de4" (UID: "df1dca25-320c-4c19-a6c7-b113a3ec4de4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.356741 4840 scope.go:117] "RemoveContainer" containerID="e4f66e0a94c42cfc6e890bd077df0955fe9c76053fefecde909974a60c30d8dc" Dec 05 15:14:55 crc kubenswrapper[4840]: E1205 15:14:55.357143 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4f66e0a94c42cfc6e890bd077df0955fe9c76053fefecde909974a60c30d8dc\": container with ID starting with e4f66e0a94c42cfc6e890bd077df0955fe9c76053fefecde909974a60c30d8dc not found: ID does not exist" containerID="e4f66e0a94c42cfc6e890bd077df0955fe9c76053fefecde909974a60c30d8dc" Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.357169 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4f66e0a94c42cfc6e890bd077df0955fe9c76053fefecde909974a60c30d8dc"} err="failed to get container status \"e4f66e0a94c42cfc6e890bd077df0955fe9c76053fefecde909974a60c30d8dc\": rpc error: code = NotFound desc = could not find container \"e4f66e0a94c42cfc6e890bd077df0955fe9c76053fefecde909974a60c30d8dc\": container with ID starting with e4f66e0a94c42cfc6e890bd077df0955fe9c76053fefecde909974a60c30d8dc not found: ID does not exist" Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.357188 4840 scope.go:117] "RemoveContainer" containerID="d33c0083005777325ad317c594e558d1f92771bfaaf36f68077ec2424f842a6b" Dec 05 15:14:55 crc kubenswrapper[4840]: E1205 15:14:55.357365 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d33c0083005777325ad317c594e558d1f92771bfaaf36f68077ec2424f842a6b\": container with ID starting with d33c0083005777325ad317c594e558d1f92771bfaaf36f68077ec2424f842a6b not found: ID does not exist" containerID="d33c0083005777325ad317c594e558d1f92771bfaaf36f68077ec2424f842a6b" Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.357382 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d33c0083005777325ad317c594e558d1f92771bfaaf36f68077ec2424f842a6b"} err="failed to get container status \"d33c0083005777325ad317c594e558d1f92771bfaaf36f68077ec2424f842a6b\": rpc error: code = NotFound desc = could not find container \"d33c0083005777325ad317c594e558d1f92771bfaaf36f68077ec2424f842a6b\": container with ID starting with d33c0083005777325ad317c594e558d1f92771bfaaf36f68077ec2424f842a6b not found: ID does not exist" Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.357394 4840 scope.go:117] "RemoveContainer" containerID="90d95b9abca22f806287f13d5748ed1c79945684b0574dad650e2f733b54a05a" Dec 05 15:14:55 crc kubenswrapper[4840]: E1205 15:14:55.357556 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90d95b9abca22f806287f13d5748ed1c79945684b0574dad650e2f733b54a05a\": container with ID starting with 90d95b9abca22f806287f13d5748ed1c79945684b0574dad650e2f733b54a05a not found: ID does not exist" containerID="90d95b9abca22f806287f13d5748ed1c79945684b0574dad650e2f733b54a05a" Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.357578 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90d95b9abca22f806287f13d5748ed1c79945684b0574dad650e2f733b54a05a"} err="failed to get container status \"90d95b9abca22f806287f13d5748ed1c79945684b0574dad650e2f733b54a05a\": rpc error: code = NotFound desc = could not 
find container \"90d95b9abca22f806287f13d5748ed1c79945684b0574dad650e2f733b54a05a\": container with ID starting with 90d95b9abca22f806287f13d5748ed1c79945684b0574dad650e2f733b54a05a not found: ID does not exist" Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.373488 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df1dca25-320c-4c19-a6c7-b113a3ec4de4-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.373971 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df1dca25-320c-4c19-a6c7-b113a3ec4de4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.374084 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6c8l9\" (UniqueName: \"kubernetes.io/projected/df1dca25-320c-4c19-a6c7-b113a3ec4de4-kube-api-access-6c8l9\") on node \"crc\" DevicePath \"\"" Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.602821 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c8jdn"] Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.608449 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-c8jdn"] Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.977313 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-scqgw" Dec 05 15:14:55 crc kubenswrapper[4840]: I1205 15:14:55.977361 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-scqgw" Dec 05 15:14:56 crc kubenswrapper[4840]: I1205 15:14:56.031285 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-scqgw" Dec 05 15:14:56 crc kubenswrapper[4840]: I1205 15:14:56.076546 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df1dca25-320c-4c19-a6c7-b113a3ec4de4" path="/var/lib/kubelet/pods/df1dca25-320c-4c19-a6c7-b113a3ec4de4/volumes" Dec 05 15:14:56 crc kubenswrapper[4840]: I1205 15:14:56.267426 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" event={"ID":"a18cf365-d0a8-4750-b11e-12d608ceb0e9","Type":"ContainerStarted","Data":"ce3746e9162ce7f99c33377d4ea2d025572467efc145bc1edd8a3674ab89634d"} Dec 05 15:14:56 crc kubenswrapper[4840]: I1205 15:14:56.267971 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" Dec 05 15:14:56 crc kubenswrapper[4840]: I1205 15:14:56.292363 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" podStartSLOduration=34.733435679 podStartE2EDuration="1m12.29234521s" podCreationTimestamp="2025-12-05 15:13:44 +0000 UTC" firstStartedPulling="2025-12-05 15:14:17.99683724 +0000 UTC m=+936.337899854" lastFinishedPulling="2025-12-05 15:14:55.555746721 +0000 UTC m=+973.896809385" observedRunningTime="2025-12-05 15:14:56.289330515 +0000 UTC m=+974.630393129" watchObservedRunningTime="2025-12-05 15:14:56.29234521 +0000 UTC m=+974.633407824" Dec 05 15:14:57 crc kubenswrapper[4840]: I1205 15:14:57.233467 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/certified-operators-5x2cw"] Dec 05 15:14:57 crc kubenswrapper[4840]: I1205 15:14:57.278439 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5x2cw" podUID="980c2743-6e13-40d7-8752-8f3fefc3fefb" containerName="registry-server" containerID="cri-o://246e86458d6cfb28ba869ab3cc2876e0e302f9d05b76dd44d709e03fc11bb785" gracePeriod=2 Dec 05 15:14:58 crc kubenswrapper[4840]: I1205 15:14:58.296257 4840 generic.go:334] "Generic (PLEG): container finished" podID="980c2743-6e13-40d7-8752-8f3fefc3fefb" containerID="246e86458d6cfb28ba869ab3cc2876e0e302f9d05b76dd44d709e03fc11bb785" exitCode=0 Dec 05 15:14:58 crc kubenswrapper[4840]: I1205 15:14:58.296729 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x2cw" event={"ID":"980c2743-6e13-40d7-8752-8f3fefc3fefb","Type":"ContainerDied","Data":"246e86458d6cfb28ba869ab3cc2876e0e302f9d05b76dd44d709e03fc11bb785"} Dec 05 15:14:58 crc kubenswrapper[4840]: I1205 15:14:58.296829 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x2cw" event={"ID":"980c2743-6e13-40d7-8752-8f3fefc3fefb","Type":"ContainerDied","Data":"163dad06520a6217966cf7bd510c999da9d1da13bad01f487db71414f381c788"} Dec 05 15:14:58 crc kubenswrapper[4840]: I1205 15:14:58.296844 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="163dad06520a6217966cf7bd510c999da9d1da13bad01f487db71414f381c788" Dec 05 15:14:58 crc kubenswrapper[4840]: I1205 15:14:58.322071 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5x2cw" Dec 05 15:14:58 crc kubenswrapper[4840]: I1205 15:14:58.461971 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/980c2743-6e13-40d7-8752-8f3fefc3fefb-catalog-content\") pod \"980c2743-6e13-40d7-8752-8f3fefc3fefb\" (UID: \"980c2743-6e13-40d7-8752-8f3fefc3fefb\") " Dec 05 15:14:58 crc kubenswrapper[4840]: I1205 15:14:58.462115 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6c2vd\" (UniqueName: \"kubernetes.io/projected/980c2743-6e13-40d7-8752-8f3fefc3fefb-kube-api-access-6c2vd\") pod \"980c2743-6e13-40d7-8752-8f3fefc3fefb\" (UID: \"980c2743-6e13-40d7-8752-8f3fefc3fefb\") " Dec 05 15:14:58 crc kubenswrapper[4840]: I1205 15:14:58.462202 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/980c2743-6e13-40d7-8752-8f3fefc3fefb-utilities\") pod \"980c2743-6e13-40d7-8752-8f3fefc3fefb\" (UID: \"980c2743-6e13-40d7-8752-8f3fefc3fefb\") " Dec 05 15:14:58 crc kubenswrapper[4840]: I1205 15:14:58.463224 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/980c2743-6e13-40d7-8752-8f3fefc3fefb-utilities" (OuterVolumeSpecName: "utilities") pod "980c2743-6e13-40d7-8752-8f3fefc3fefb" (UID: "980c2743-6e13-40d7-8752-8f3fefc3fefb"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:14:58 crc kubenswrapper[4840]: I1205 15:14:58.470126 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/980c2743-6e13-40d7-8752-8f3fefc3fefb-kube-api-access-6c2vd" (OuterVolumeSpecName: "kube-api-access-6c2vd") pod "980c2743-6e13-40d7-8752-8f3fefc3fefb" (UID: "980c2743-6e13-40d7-8752-8f3fefc3fefb"). InnerVolumeSpecName "kube-api-access-6c2vd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:14:58 crc kubenswrapper[4840]: I1205 15:14:58.519161 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/980c2743-6e13-40d7-8752-8f3fefc3fefb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "980c2743-6e13-40d7-8752-8f3fefc3fefb" (UID: "980c2743-6e13-40d7-8752-8f3fefc3fefb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:14:58 crc kubenswrapper[4840]: I1205 15:14:58.563363 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6c2vd\" (UniqueName: \"kubernetes.io/projected/980c2743-6e13-40d7-8752-8f3fefc3fefb-kube-api-access-6c2vd\") on node \"crc\" DevicePath \"\"" Dec 05 15:14:58 crc kubenswrapper[4840]: I1205 15:14:58.563396 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/980c2743-6e13-40d7-8752-8f3fefc3fefb-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 15:14:58 crc kubenswrapper[4840]: I1205 15:14:58.563407 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/980c2743-6e13-40d7-8752-8f3fefc3fefb-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 15:14:59 crc kubenswrapper[4840]: I1205 15:14:59.307201 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz" event={"ID":"8e7074a0-bae6-49e7-8915-c4cb3242108d","Type":"ContainerStarted","Data":"7b2ab10231dc84e4ac3b69ceb418d3a62360978bdd96393ad83f8c3586db6750"} Dec 05 15:14:59 crc kubenswrapper[4840]: I1205 15:14:59.307252 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5x2cw" Dec 05 15:14:59 crc kubenswrapper[4840]: I1205 15:14:59.307790 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz" Dec 05 15:14:59 crc kubenswrapper[4840]: I1205 15:14:59.343165 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz" podStartSLOduration=36.089105529 podStartE2EDuration="1m16.343139141s" podCreationTimestamp="2025-12-05 15:13:43 +0000 UTC" firstStartedPulling="2025-12-05 15:14:18.481165564 +0000 UTC m=+936.822228178" lastFinishedPulling="2025-12-05 15:14:58.735199166 +0000 UTC m=+977.076261790" observedRunningTime="2025-12-05 15:14:59.340712772 +0000 UTC m=+977.681775396" watchObservedRunningTime="2025-12-05 15:14:59.343139141 +0000 UTC m=+977.684201755" Dec 05 15:14:59 crc kubenswrapper[4840]: I1205 15:14:59.361070 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5x2cw"] Dec 05 15:14:59 crc kubenswrapper[4840]: I1205 15:14:59.366046 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5x2cw"] Dec 05 15:15:00 crc kubenswrapper[4840]: I1205 15:15:00.083422 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="980c2743-6e13-40d7-8752-8f3fefc3fefb" path="/var/lib/kubelet/pods/980c2743-6e13-40d7-8752-8f3fefc3fefb/volumes" Dec 05 15:15:00 crc kubenswrapper[4840]: I1205 15:15:00.177645 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415795-dxxp6"] Dec 05 15:15:00 crc kubenswrapper[4840]: E1205 15:15:00.178000 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df1dca25-320c-4c19-a6c7-b113a3ec4de4" containerName="extract-content" Dec 05 15:15:00 crc kubenswrapper[4840]: I1205 15:15:00.178020 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="df1dca25-320c-4c19-a6c7-b113a3ec4de4" containerName="extract-content" Dec 05 15:15:00 crc kubenswrapper[4840]: E1205 15:15:00.178059 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980c2743-6e13-40d7-8752-8f3fefc3fefb" containerName="extract-utilities" Dec 05 15:15:00 crc kubenswrapper[4840]: I1205 15:15:00.178069 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="980c2743-6e13-40d7-8752-8f3fefc3fefb" containerName="extract-utilities" Dec 05 15:15:00 crc kubenswrapper[4840]: E1205 15:15:00.178090 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980c2743-6e13-40d7-8752-8f3fefc3fefb" containerName="extract-content" Dec 05 15:15:00 crc kubenswrapper[4840]: I1205 15:15:00.178099 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="980c2743-6e13-40d7-8752-8f3fefc3fefb" containerName="extract-content" Dec 05 15:15:00 crc kubenswrapper[4840]: E1205 15:15:00.178116 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df1dca25-320c-4c19-a6c7-b113a3ec4de4" containerName="extract-utilities" Dec 05 15:15:00 crc kubenswrapper[4840]: I1205 15:15:00.178125 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="df1dca25-320c-4c19-a6c7-b113a3ec4de4" containerName="extract-utilities" Dec 05 15:15:00 crc kubenswrapper[4840]: E1205 15:15:00.178143 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="980c2743-6e13-40d7-8752-8f3fefc3fefb" containerName="registry-server" Dec 05 15:15:00 crc 
kubenswrapper[4840]: I1205 15:15:00.178151 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="980c2743-6e13-40d7-8752-8f3fefc3fefb" containerName="registry-server" Dec 05 15:15:00 crc kubenswrapper[4840]: E1205 15:15:00.178171 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df1dca25-320c-4c19-a6c7-b113a3ec4de4" containerName="registry-server" Dec 05 15:15:00 crc kubenswrapper[4840]: I1205 15:15:00.178178 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="df1dca25-320c-4c19-a6c7-b113a3ec4de4" containerName="registry-server" Dec 05 15:15:00 crc kubenswrapper[4840]: I1205 15:15:00.178348 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="980c2743-6e13-40d7-8752-8f3fefc3fefb" containerName="registry-server" Dec 05 15:15:00 crc kubenswrapper[4840]: I1205 15:15:00.178365 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="df1dca25-320c-4c19-a6c7-b113a3ec4de4" containerName="registry-server" Dec 05 15:15:00 crc kubenswrapper[4840]: I1205 15:15:00.178957 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415795-dxxp6" Dec 05 15:15:00 crc kubenswrapper[4840]: I1205 15:15:00.181070 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 15:15:00 crc kubenswrapper[4840]: I1205 15:15:00.181168 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 15:15:00 crc kubenswrapper[4840]: I1205 15:15:00.192942 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415795-dxxp6"] Dec 05 15:15:00 crc kubenswrapper[4840]: I1205 15:15:00.286615 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzp9x\" (UniqueName: \"kubernetes.io/projected/b286c3cb-717a-4be7-aecf-f2eaa8732dfd-kube-api-access-fzp9x\") pod \"collect-profiles-29415795-dxxp6\" (UID: \"b286c3cb-717a-4be7-aecf-f2eaa8732dfd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415795-dxxp6" Dec 05 15:15:00 crc kubenswrapper[4840]: I1205 15:15:00.286918 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b286c3cb-717a-4be7-aecf-f2eaa8732dfd-config-volume\") pod \"collect-profiles-29415795-dxxp6\" (UID: \"b286c3cb-717a-4be7-aecf-f2eaa8732dfd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415795-dxxp6" Dec 05 15:15:00 crc kubenswrapper[4840]: I1205 15:15:00.287040 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b286c3cb-717a-4be7-aecf-f2eaa8732dfd-secret-volume\") pod \"collect-profiles-29415795-dxxp6\" (UID: \"b286c3cb-717a-4be7-aecf-f2eaa8732dfd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415795-dxxp6" Dec 05 15:15:00 crc kubenswrapper[4840]: I1205 15:15:00.514439 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b286c3cb-717a-4be7-aecf-f2eaa8732dfd-config-volume\") pod \"collect-profiles-29415795-dxxp6\" (UID: \"b286c3cb-717a-4be7-aecf-f2eaa8732dfd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415795-dxxp6" Dec 05 15:15:00 crc 
kubenswrapper[4840]: I1205 15:15:00.514496 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b286c3cb-717a-4be7-aecf-f2eaa8732dfd-secret-volume\") pod \"collect-profiles-29415795-dxxp6\" (UID: \"b286c3cb-717a-4be7-aecf-f2eaa8732dfd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415795-dxxp6" Dec 05 15:15:00 crc kubenswrapper[4840]: I1205 15:15:00.514598 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzp9x\" (UniqueName: \"kubernetes.io/projected/b286c3cb-717a-4be7-aecf-f2eaa8732dfd-kube-api-access-fzp9x\") pod \"collect-profiles-29415795-dxxp6\" (UID: \"b286c3cb-717a-4be7-aecf-f2eaa8732dfd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415795-dxxp6" Dec 05 15:15:00 crc kubenswrapper[4840]: I1205 15:15:00.517488 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b286c3cb-717a-4be7-aecf-f2eaa8732dfd-config-volume\") pod \"collect-profiles-29415795-dxxp6\" (UID: \"b286c3cb-717a-4be7-aecf-f2eaa8732dfd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415795-dxxp6" Dec 05 15:15:00 crc kubenswrapper[4840]: I1205 15:15:00.520596 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b286c3cb-717a-4be7-aecf-f2eaa8732dfd-secret-volume\") pod \"collect-profiles-29415795-dxxp6\" (UID: \"b286c3cb-717a-4be7-aecf-f2eaa8732dfd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415795-dxxp6" Dec 05 15:15:00 crc kubenswrapper[4840]: I1205 15:15:00.538919 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzp9x\" (UniqueName: \"kubernetes.io/projected/b286c3cb-717a-4be7-aecf-f2eaa8732dfd-kube-api-access-fzp9x\") pod \"collect-profiles-29415795-dxxp6\" (UID: \"b286c3cb-717a-4be7-aecf-f2eaa8732dfd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415795-dxxp6" Dec 05 15:15:00 crc kubenswrapper[4840]: I1205 15:15:00.794608 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415795-dxxp6" Dec 05 15:15:01 crc kubenswrapper[4840]: I1205 15:15:01.030368 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt" Dec 05 15:15:01 crc kubenswrapper[4840]: I1205 15:15:01.415173 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415795-dxxp6"] Dec 05 15:15:01 crc kubenswrapper[4840]: W1205 15:15:01.421033 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb286c3cb_717a_4be7_aecf_f2eaa8732dfd.slice/crio-80ac76c7858677d2aa5f3fef555324a7fd6b48e95ce402f8323bee90ac832bcd WatchSource:0}: Error finding container 80ac76c7858677d2aa5f3fef555324a7fd6b48e95ce402f8323bee90ac832bcd: Status 404 returned error can't find the container with id 80ac76c7858677d2aa5f3fef555324a7fd6b48e95ce402f8323bee90ac832bcd Dec 05 15:15:02 crc kubenswrapper[4840]: I1205 15:15:02.329252 4840 generic.go:334] "Generic (PLEG): container finished" podID="b286c3cb-717a-4be7-aecf-f2eaa8732dfd" containerID="dab6017c79880ed760c34ba5d16644e1fc182a492c9f84db6b506e36d7c609ca" exitCode=0 Dec 05 15:15:02 crc kubenswrapper[4840]: I1205 15:15:02.329351 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415795-dxxp6" event={"ID":"b286c3cb-717a-4be7-aecf-f2eaa8732dfd","Type":"ContainerDied","Data":"dab6017c79880ed760c34ba5d16644e1fc182a492c9f84db6b506e36d7c609ca"} Dec 05 15:15:02 crc kubenswrapper[4840]: I1205 15:15:02.329520 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415795-dxxp6" event={"ID":"b286c3cb-717a-4be7-aecf-f2eaa8732dfd","Type":"ContainerStarted","Data":"80ac76c7858677d2aa5f3fef555324a7fd6b48e95ce402f8323bee90ac832bcd"} Dec 05 15:15:03 crc kubenswrapper[4840]: I1205 15:15:03.867554 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415795-dxxp6" Dec 05 15:15:04 crc kubenswrapper[4840]: I1205 15:15:04.065436 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzp9x\" (UniqueName: \"kubernetes.io/projected/b286c3cb-717a-4be7-aecf-f2eaa8732dfd-kube-api-access-fzp9x\") pod \"b286c3cb-717a-4be7-aecf-f2eaa8732dfd\" (UID: \"b286c3cb-717a-4be7-aecf-f2eaa8732dfd\") " Dec 05 15:15:04 crc kubenswrapper[4840]: I1205 15:15:04.065479 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b286c3cb-717a-4be7-aecf-f2eaa8732dfd-secret-volume\") pod \"b286c3cb-717a-4be7-aecf-f2eaa8732dfd\" (UID: \"b286c3cb-717a-4be7-aecf-f2eaa8732dfd\") " Dec 05 15:15:04 crc kubenswrapper[4840]: I1205 15:15:04.065540 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b286c3cb-717a-4be7-aecf-f2eaa8732dfd-config-volume\") pod \"b286c3cb-717a-4be7-aecf-f2eaa8732dfd\" (UID: \"b286c3cb-717a-4be7-aecf-f2eaa8732dfd\") " Dec 05 15:15:04 crc kubenswrapper[4840]: I1205 15:15:04.066540 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b286c3cb-717a-4be7-aecf-f2eaa8732dfd-config-volume" (OuterVolumeSpecName: "config-volume") pod "b286c3cb-717a-4be7-aecf-f2eaa8732dfd" (UID: "b286c3cb-717a-4be7-aecf-f2eaa8732dfd"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:15:04 crc kubenswrapper[4840]: I1205 15:15:04.074608 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b286c3cb-717a-4be7-aecf-f2eaa8732dfd-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b286c3cb-717a-4be7-aecf-f2eaa8732dfd" (UID: "b286c3cb-717a-4be7-aecf-f2eaa8732dfd"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:15:04 crc kubenswrapper[4840]: I1205 15:15:04.078829 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b286c3cb-717a-4be7-aecf-f2eaa8732dfd-kube-api-access-fzp9x" (OuterVolumeSpecName: "kube-api-access-fzp9x") pod "b286c3cb-717a-4be7-aecf-f2eaa8732dfd" (UID: "b286c3cb-717a-4be7-aecf-f2eaa8732dfd"). InnerVolumeSpecName "kube-api-access-fzp9x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:15:04 crc kubenswrapper[4840]: I1205 15:15:04.166932 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzp9x\" (UniqueName: \"kubernetes.io/projected/b286c3cb-717a-4be7-aecf-f2eaa8732dfd-kube-api-access-fzp9x\") on node \"crc\" DevicePath \"\"" Dec 05 15:15:04 crc kubenswrapper[4840]: I1205 15:15:04.166982 4840 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b286c3cb-717a-4be7-aecf-f2eaa8732dfd-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 15:15:04 crc kubenswrapper[4840]: I1205 15:15:04.167007 4840 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b286c3cb-717a-4be7-aecf-f2eaa8732dfd-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 15:15:04 crc kubenswrapper[4840]: I1205 15:15:04.345337 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415795-dxxp6" event={"ID":"b286c3cb-717a-4be7-aecf-f2eaa8732dfd","Type":"ContainerDied","Data":"80ac76c7858677d2aa5f3fef555324a7fd6b48e95ce402f8323bee90ac832bcd"} Dec 05 15:15:04 crc kubenswrapper[4840]: I1205 15:15:04.345387 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="80ac76c7858677d2aa5f3fef555324a7fd6b48e95ce402f8323bee90ac832bcd" Dec 05 15:15:04 crc kubenswrapper[4840]: I1205 15:15:04.345420 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415795-dxxp6" Dec 05 15:15:06 crc kubenswrapper[4840]: I1205 15:15:06.022567 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-scqgw" Dec 05 15:15:06 crc kubenswrapper[4840]: I1205 15:15:06.089708 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-scqgw"] Dec 05 15:15:06 crc kubenswrapper[4840]: I1205 15:15:06.138983 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hmzgl"] Dec 05 15:15:06 crc kubenswrapper[4840]: I1205 15:15:06.139431 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-hmzgl" podUID="df7e1030-5127-493c-ab1a-a8f8ce069e83" containerName="registry-server" containerID="cri-o://be2414039a111f9b68cb77acd4316c7d65672af01e7664b180530d38ae3f30f0" gracePeriod=2 Dec 05 15:15:06 crc kubenswrapper[4840]: I1205 15:15:06.363635 4840 generic.go:334] "Generic (PLEG): container finished" podID="df7e1030-5127-493c-ab1a-a8f8ce069e83" containerID="be2414039a111f9b68cb77acd4316c7d65672af01e7664b180530d38ae3f30f0" exitCode=0 Dec 05 15:15:06 crc kubenswrapper[4840]: I1205 15:15:06.363711 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hmzgl" event={"ID":"df7e1030-5127-493c-ab1a-a8f8ce069e83","Type":"ContainerDied","Data":"be2414039a111f9b68cb77acd4316c7d65672af01e7664b180530d38ae3f30f0"} Dec 05 15:15:07 crc kubenswrapper[4840]: I1205 15:15:07.016354 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hmzgl" Dec 05 15:15:07 crc kubenswrapper[4840]: I1205 15:15:07.203238 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bps5d\" (UniqueName: \"kubernetes.io/projected/df7e1030-5127-493c-ab1a-a8f8ce069e83-kube-api-access-bps5d\") pod \"df7e1030-5127-493c-ab1a-a8f8ce069e83\" (UID: \"df7e1030-5127-493c-ab1a-a8f8ce069e83\") " Dec 05 15:15:07 crc kubenswrapper[4840]: I1205 15:15:07.203373 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df7e1030-5127-493c-ab1a-a8f8ce069e83-utilities\") pod \"df7e1030-5127-493c-ab1a-a8f8ce069e83\" (UID: \"df7e1030-5127-493c-ab1a-a8f8ce069e83\") " Dec 05 15:15:07 crc kubenswrapper[4840]: I1205 15:15:07.203441 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df7e1030-5127-493c-ab1a-a8f8ce069e83-catalog-content\") pod \"df7e1030-5127-493c-ab1a-a8f8ce069e83\" (UID: \"df7e1030-5127-493c-ab1a-a8f8ce069e83\") " Dec 05 15:15:07 crc kubenswrapper[4840]: I1205 15:15:07.204442 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df7e1030-5127-493c-ab1a-a8f8ce069e83-utilities" (OuterVolumeSpecName: "utilities") pod "df7e1030-5127-493c-ab1a-a8f8ce069e83" (UID: "df7e1030-5127-493c-ab1a-a8f8ce069e83"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:15:07 crc kubenswrapper[4840]: I1205 15:15:07.208067 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df7e1030-5127-493c-ab1a-a8f8ce069e83-kube-api-access-bps5d" (OuterVolumeSpecName: "kube-api-access-bps5d") pod "df7e1030-5127-493c-ab1a-a8f8ce069e83" (UID: "df7e1030-5127-493c-ab1a-a8f8ce069e83"). InnerVolumeSpecName "kube-api-access-bps5d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:15:07 crc kubenswrapper[4840]: I1205 15:15:07.224447 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df7e1030-5127-493c-ab1a-a8f8ce069e83-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "df7e1030-5127-493c-ab1a-a8f8ce069e83" (UID: "df7e1030-5127-493c-ab1a-a8f8ce069e83"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:15:07 crc kubenswrapper[4840]: I1205 15:15:07.305071 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df7e1030-5127-493c-ab1a-a8f8ce069e83-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 15:15:07 crc kubenswrapper[4840]: I1205 15:15:07.305417 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bps5d\" (UniqueName: \"kubernetes.io/projected/df7e1030-5127-493c-ab1a-a8f8ce069e83-kube-api-access-bps5d\") on node \"crc\" DevicePath \"\"" Dec 05 15:15:07 crc kubenswrapper[4840]: I1205 15:15:07.305433 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df7e1030-5127-493c-ab1a-a8f8ce069e83-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 15:15:07 crc kubenswrapper[4840]: I1205 15:15:07.371606 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hmzgl" event={"ID":"df7e1030-5127-493c-ab1a-a8f8ce069e83","Type":"ContainerDied","Data":"44d5e36fe4d4b7c804a5fdf15a997852bd031ca911260cf3b6c4fad59c6efba4"} Dec 05 15:15:07 crc kubenswrapper[4840]: I1205 15:15:07.371657 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hmzgl" Dec 05 15:15:07 crc kubenswrapper[4840]: I1205 15:15:07.371673 4840 scope.go:117] "RemoveContainer" containerID="be2414039a111f9b68cb77acd4316c7d65672af01e7664b180530d38ae3f30f0" Dec 05 15:15:07 crc kubenswrapper[4840]: I1205 15:15:07.393269 4840 scope.go:117] "RemoveContainer" containerID="90ccc15369691e50ed1059498beac37bb4c298328db64c84fcb6cad6374e4bb7" Dec 05 15:15:07 crc kubenswrapper[4840]: I1205 15:15:07.401586 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hmzgl"] Dec 05 15:15:07 crc kubenswrapper[4840]: I1205 15:15:07.408666 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-hmzgl"] Dec 05 15:15:07 crc kubenswrapper[4840]: I1205 15:15:07.421094 4840 scope.go:117] "RemoveContainer" containerID="ae5a39222b2503a505c76060889d168da2c87a679f9c593e3112fbd1b8184b5b" Dec 05 15:15:08 crc kubenswrapper[4840]: I1205 15:15:08.075236 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df7e1030-5127-493c-ab1a-a8f8ce069e83" path="/var/lib/kubelet/pods/df7e1030-5127-493c-ab1a-a8f8ce069e83/volumes" Dec 05 15:15:09 crc kubenswrapper[4840]: I1205 15:15:09.810589 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-gpcmz" Dec 05 15:15:19 crc kubenswrapper[4840]: I1205 15:15:19.471604 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:15:19 crc kubenswrapper[4840]: I1205 15:15:19.473416 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:15:19 crc kubenswrapper[4840]: I1205 15:15:19.474412 4840 
kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" Dec 05 15:15:19 crc kubenswrapper[4840]: I1205 15:15:19.475264 4840 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d69a7eb1c40b94e45105da8261bb07a2f04367f59caef02ba4c0e3aa6dc28e33"} pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 15:15:19 crc kubenswrapper[4840]: I1205 15:15:19.475493 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" containerID="cri-o://d69a7eb1c40b94e45105da8261bb07a2f04367f59caef02ba4c0e3aa6dc28e33" gracePeriod=600 Dec 05 15:15:20 crc kubenswrapper[4840]: I1205 15:15:20.475472 4840 generic.go:334] "Generic (PLEG): container finished" podID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerID="d69a7eb1c40b94e45105da8261bb07a2f04367f59caef02ba4c0e3aa6dc28e33" exitCode=0 Dec 05 15:15:20 crc kubenswrapper[4840]: I1205 15:15:20.475541 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerDied","Data":"d69a7eb1c40b94e45105da8261bb07a2f04367f59caef02ba4c0e3aa6dc28e33"} Dec 05 15:15:20 crc kubenswrapper[4840]: I1205 15:15:20.475830 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerStarted","Data":"40a488d86bcc9fa72aa671ce4746fbea89ae7ae377bf2c9aff3cd9df6bf2d02c"} Dec 05 15:15:20 crc kubenswrapper[4840]: I1205 15:15:20.475860 4840 scope.go:117] "RemoveContainer" containerID="83830503ae8c68349e9a52130f5a53e1e9c359c7b97c632d824c7d4a08e0e1f1" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.272421 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-brnf4"] Dec 05 15:15:32 crc kubenswrapper[4840]: E1205 15:15:32.273376 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df7e1030-5127-493c-ab1a-a8f8ce069e83" containerName="extract-utilities" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.273627 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="df7e1030-5127-493c-ab1a-a8f8ce069e83" containerName="extract-utilities" Dec 05 15:15:32 crc kubenswrapper[4840]: E1205 15:15:32.273638 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df7e1030-5127-493c-ab1a-a8f8ce069e83" containerName="registry-server" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.273645 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="df7e1030-5127-493c-ab1a-a8f8ce069e83" containerName="registry-server" Dec 05 15:15:32 crc kubenswrapper[4840]: E1205 15:15:32.273663 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b286c3cb-717a-4be7-aecf-f2eaa8732dfd" containerName="collect-profiles" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.273669 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="b286c3cb-717a-4be7-aecf-f2eaa8732dfd" containerName="collect-profiles" Dec 05 15:15:32 crc kubenswrapper[4840]: E1205 15:15:32.273698 4840 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="df7e1030-5127-493c-ab1a-a8f8ce069e83" containerName="extract-content" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.273703 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="df7e1030-5127-493c-ab1a-a8f8ce069e83" containerName="extract-content" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.273833 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="b286c3cb-717a-4be7-aecf-f2eaa8732dfd" containerName="collect-profiles" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.273847 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="df7e1030-5127-493c-ab1a-a8f8ce069e83" containerName="registry-server" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.274791 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-brnf4" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.279536 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.279852 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.280025 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-lk75h" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.280174 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.297527 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-brnf4"] Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.361090 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-d5ll6"] Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.363248 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-d5ll6" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.365433 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.367804 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-d5ll6"] Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.464894 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/299512bd-f667-4602-8ec2-307ecdcbabc4-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-d5ll6\" (UID: \"299512bd-f667-4602-8ec2-307ecdcbabc4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-d5ll6" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.464966 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71997911-0710-4c53-b9ce-fe9673740c28-config\") pod \"dnsmasq-dns-675f4bcbfc-brnf4\" (UID: \"71997911-0710-4c53-b9ce-fe9673740c28\") " pod="openstack/dnsmasq-dns-675f4bcbfc-brnf4" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.464996 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2z44x\" (UniqueName: \"kubernetes.io/projected/299512bd-f667-4602-8ec2-307ecdcbabc4-kube-api-access-2z44x\") pod \"dnsmasq-dns-78dd6ddcc-d5ll6\" (UID: \"299512bd-f667-4602-8ec2-307ecdcbabc4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-d5ll6" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.465437 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rz4s6\" (UniqueName: \"kubernetes.io/projected/71997911-0710-4c53-b9ce-fe9673740c28-kube-api-access-rz4s6\") pod \"dnsmasq-dns-675f4bcbfc-brnf4\" (UID: \"71997911-0710-4c53-b9ce-fe9673740c28\") " pod="openstack/dnsmasq-dns-675f4bcbfc-brnf4" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.465491 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/299512bd-f667-4602-8ec2-307ecdcbabc4-config\") pod \"dnsmasq-dns-78dd6ddcc-d5ll6\" (UID: \"299512bd-f667-4602-8ec2-307ecdcbabc4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-d5ll6" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.566886 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71997911-0710-4c53-b9ce-fe9673740c28-config\") pod \"dnsmasq-dns-675f4bcbfc-brnf4\" (UID: \"71997911-0710-4c53-b9ce-fe9673740c28\") " pod="openstack/dnsmasq-dns-675f4bcbfc-brnf4" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.566935 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2z44x\" (UniqueName: \"kubernetes.io/projected/299512bd-f667-4602-8ec2-307ecdcbabc4-kube-api-access-2z44x\") pod \"dnsmasq-dns-78dd6ddcc-d5ll6\" (UID: \"299512bd-f667-4602-8ec2-307ecdcbabc4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-d5ll6" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.566982 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rz4s6\" (UniqueName: \"kubernetes.io/projected/71997911-0710-4c53-b9ce-fe9673740c28-kube-api-access-rz4s6\") pod \"dnsmasq-dns-675f4bcbfc-brnf4\" (UID: \"71997911-0710-4c53-b9ce-fe9673740c28\") " 
pod="openstack/dnsmasq-dns-675f4bcbfc-brnf4" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.567027 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/299512bd-f667-4602-8ec2-307ecdcbabc4-config\") pod \"dnsmasq-dns-78dd6ddcc-d5ll6\" (UID: \"299512bd-f667-4602-8ec2-307ecdcbabc4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-d5ll6" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.567070 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/299512bd-f667-4602-8ec2-307ecdcbabc4-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-d5ll6\" (UID: \"299512bd-f667-4602-8ec2-307ecdcbabc4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-d5ll6" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.567779 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71997911-0710-4c53-b9ce-fe9673740c28-config\") pod \"dnsmasq-dns-675f4bcbfc-brnf4\" (UID: \"71997911-0710-4c53-b9ce-fe9673740c28\") " pod="openstack/dnsmasq-dns-675f4bcbfc-brnf4" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.567882 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/299512bd-f667-4602-8ec2-307ecdcbabc4-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-d5ll6\" (UID: \"299512bd-f667-4602-8ec2-307ecdcbabc4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-d5ll6" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.567964 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/299512bd-f667-4602-8ec2-307ecdcbabc4-config\") pod \"dnsmasq-dns-78dd6ddcc-d5ll6\" (UID: \"299512bd-f667-4602-8ec2-307ecdcbabc4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-d5ll6" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.589735 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rz4s6\" (UniqueName: \"kubernetes.io/projected/71997911-0710-4c53-b9ce-fe9673740c28-kube-api-access-rz4s6\") pod \"dnsmasq-dns-675f4bcbfc-brnf4\" (UID: \"71997911-0710-4c53-b9ce-fe9673740c28\") " pod="openstack/dnsmasq-dns-675f4bcbfc-brnf4" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.589741 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2z44x\" (UniqueName: \"kubernetes.io/projected/299512bd-f667-4602-8ec2-307ecdcbabc4-kube-api-access-2z44x\") pod \"dnsmasq-dns-78dd6ddcc-d5ll6\" (UID: \"299512bd-f667-4602-8ec2-307ecdcbabc4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-d5ll6" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.609695 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-brnf4" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.676083 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-d5ll6" Dec 05 15:15:32 crc kubenswrapper[4840]: I1205 15:15:32.939038 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-d5ll6"] Dec 05 15:15:33 crc kubenswrapper[4840]: I1205 15:15:33.048532 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-brnf4"] Dec 05 15:15:33 crc kubenswrapper[4840]: W1205 15:15:33.052241 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71997911_0710_4c53_b9ce_fe9673740c28.slice/crio-9b12446948db0e300bbc46ca7f60d61d79fe129d9eab7729ea093bd74cae795d WatchSource:0}: Error finding container 9b12446948db0e300bbc46ca7f60d61d79fe129d9eab7729ea093bd74cae795d: Status 404 returned error can't find the container with id 9b12446948db0e300bbc46ca7f60d61d79fe129d9eab7729ea093bd74cae795d Dec 05 15:15:33 crc kubenswrapper[4840]: I1205 15:15:33.580929 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-d5ll6" event={"ID":"299512bd-f667-4602-8ec2-307ecdcbabc4","Type":"ContainerStarted","Data":"5bdaebe222f11b7bf646bd29ceeede22b0dd1c95a24b4ee040bcac489df9d6fb"} Dec 05 15:15:33 crc kubenswrapper[4840]: I1205 15:15:33.582300 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-brnf4" event={"ID":"71997911-0710-4c53-b9ce-fe9673740c28","Type":"ContainerStarted","Data":"9b12446948db0e300bbc46ca7f60d61d79fe129d9eab7729ea093bd74cae795d"} Dec 05 15:15:35 crc kubenswrapper[4840]: I1205 15:15:35.458549 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-brnf4"] Dec 05 15:15:35 crc kubenswrapper[4840]: I1205 15:15:35.489464 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-btff4"] Dec 05 15:15:35 crc kubenswrapper[4840]: I1205 15:15:35.490901 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-btff4" Dec 05 15:15:35 crc kubenswrapper[4840]: I1205 15:15:35.510503 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-btff4"] Dec 05 15:15:35 crc kubenswrapper[4840]: I1205 15:15:35.515917 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56638051-dc23-40f8-b1f3-01d17d4a87e9-config\") pod \"dnsmasq-dns-666b6646f7-btff4\" (UID: \"56638051-dc23-40f8-b1f3-01d17d4a87e9\") " pod="openstack/dnsmasq-dns-666b6646f7-btff4" Dec 05 15:15:35 crc kubenswrapper[4840]: I1205 15:15:35.515965 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkx2x\" (UniqueName: \"kubernetes.io/projected/56638051-dc23-40f8-b1f3-01d17d4a87e9-kube-api-access-qkx2x\") pod \"dnsmasq-dns-666b6646f7-btff4\" (UID: \"56638051-dc23-40f8-b1f3-01d17d4a87e9\") " pod="openstack/dnsmasq-dns-666b6646f7-btff4" Dec 05 15:15:35 crc kubenswrapper[4840]: I1205 15:15:35.516096 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56638051-dc23-40f8-b1f3-01d17d4a87e9-dns-svc\") pod \"dnsmasq-dns-666b6646f7-btff4\" (UID: \"56638051-dc23-40f8-b1f3-01d17d4a87e9\") " pod="openstack/dnsmasq-dns-666b6646f7-btff4" Dec 05 15:15:35 crc kubenswrapper[4840]: I1205 15:15:35.618092 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56638051-dc23-40f8-b1f3-01d17d4a87e9-dns-svc\") pod \"dnsmasq-dns-666b6646f7-btff4\" (UID: \"56638051-dc23-40f8-b1f3-01d17d4a87e9\") " pod="openstack/dnsmasq-dns-666b6646f7-btff4" Dec 05 15:15:35 crc kubenswrapper[4840]: I1205 15:15:35.618166 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56638051-dc23-40f8-b1f3-01d17d4a87e9-config\") pod \"dnsmasq-dns-666b6646f7-btff4\" (UID: \"56638051-dc23-40f8-b1f3-01d17d4a87e9\") " pod="openstack/dnsmasq-dns-666b6646f7-btff4" Dec 05 15:15:35 crc kubenswrapper[4840]: I1205 15:15:35.618192 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkx2x\" (UniqueName: \"kubernetes.io/projected/56638051-dc23-40f8-b1f3-01d17d4a87e9-kube-api-access-qkx2x\") pod \"dnsmasq-dns-666b6646f7-btff4\" (UID: \"56638051-dc23-40f8-b1f3-01d17d4a87e9\") " pod="openstack/dnsmasq-dns-666b6646f7-btff4" Dec 05 15:15:35 crc kubenswrapper[4840]: I1205 15:15:35.619253 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56638051-dc23-40f8-b1f3-01d17d4a87e9-dns-svc\") pod \"dnsmasq-dns-666b6646f7-btff4\" (UID: \"56638051-dc23-40f8-b1f3-01d17d4a87e9\") " pod="openstack/dnsmasq-dns-666b6646f7-btff4" Dec 05 15:15:35 crc kubenswrapper[4840]: I1205 15:15:35.620049 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56638051-dc23-40f8-b1f3-01d17d4a87e9-config\") pod \"dnsmasq-dns-666b6646f7-btff4\" (UID: \"56638051-dc23-40f8-b1f3-01d17d4a87e9\") " pod="openstack/dnsmasq-dns-666b6646f7-btff4" Dec 05 15:15:35 crc kubenswrapper[4840]: I1205 15:15:35.640403 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkx2x\" (UniqueName: 
\"kubernetes.io/projected/56638051-dc23-40f8-b1f3-01d17d4a87e9-kube-api-access-qkx2x\") pod \"dnsmasq-dns-666b6646f7-btff4\" (UID: \"56638051-dc23-40f8-b1f3-01d17d4a87e9\") " pod="openstack/dnsmasq-dns-666b6646f7-btff4" Dec 05 15:15:35 crc kubenswrapper[4840]: I1205 15:15:35.822831 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-btff4" Dec 05 15:15:35 crc kubenswrapper[4840]: I1205 15:15:35.857188 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-d5ll6"] Dec 05 15:15:35 crc kubenswrapper[4840]: I1205 15:15:35.910799 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-v7fw8"] Dec 05 15:15:35 crc kubenswrapper[4840]: I1205 15:15:35.919269 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-v7fw8"] Dec 05 15:15:35 crc kubenswrapper[4840]: I1205 15:15:35.919413 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-v7fw8" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.025323 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4640d84-ab2c-41ae-b84b-461d096d1e28-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-v7fw8\" (UID: \"d4640d84-ab2c-41ae-b84b-461d096d1e28\") " pod="openstack/dnsmasq-dns-57d769cc4f-v7fw8" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.025372 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqtdq\" (UniqueName: \"kubernetes.io/projected/d4640d84-ab2c-41ae-b84b-461d096d1e28-kube-api-access-xqtdq\") pod \"dnsmasq-dns-57d769cc4f-v7fw8\" (UID: \"d4640d84-ab2c-41ae-b84b-461d096d1e28\") " pod="openstack/dnsmasq-dns-57d769cc4f-v7fw8" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.025554 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4640d84-ab2c-41ae-b84b-461d096d1e28-config\") pod \"dnsmasq-dns-57d769cc4f-v7fw8\" (UID: \"d4640d84-ab2c-41ae-b84b-461d096d1e28\") " pod="openstack/dnsmasq-dns-57d769cc4f-v7fw8" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.126208 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqtdq\" (UniqueName: \"kubernetes.io/projected/d4640d84-ab2c-41ae-b84b-461d096d1e28-kube-api-access-xqtdq\") pod \"dnsmasq-dns-57d769cc4f-v7fw8\" (UID: \"d4640d84-ab2c-41ae-b84b-461d096d1e28\") " pod="openstack/dnsmasq-dns-57d769cc4f-v7fw8" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.126324 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4640d84-ab2c-41ae-b84b-461d096d1e28-config\") pod \"dnsmasq-dns-57d769cc4f-v7fw8\" (UID: \"d4640d84-ab2c-41ae-b84b-461d096d1e28\") " pod="openstack/dnsmasq-dns-57d769cc4f-v7fw8" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.126447 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4640d84-ab2c-41ae-b84b-461d096d1e28-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-v7fw8\" (UID: \"d4640d84-ab2c-41ae-b84b-461d096d1e28\") " pod="openstack/dnsmasq-dns-57d769cc4f-v7fw8" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.128034 4840 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4640d84-ab2c-41ae-b84b-461d096d1e28-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-v7fw8\" (UID: \"d4640d84-ab2c-41ae-b84b-461d096d1e28\") " pod="openstack/dnsmasq-dns-57d769cc4f-v7fw8" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.130332 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4640d84-ab2c-41ae-b84b-461d096d1e28-config\") pod \"dnsmasq-dns-57d769cc4f-v7fw8\" (UID: \"d4640d84-ab2c-41ae-b84b-461d096d1e28\") " pod="openstack/dnsmasq-dns-57d769cc4f-v7fw8" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.158648 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqtdq\" (UniqueName: \"kubernetes.io/projected/d4640d84-ab2c-41ae-b84b-461d096d1e28-kube-api-access-xqtdq\") pod \"dnsmasq-dns-57d769cc4f-v7fw8\" (UID: \"d4640d84-ab2c-41ae-b84b-461d096d1e28\") " pod="openstack/dnsmasq-dns-57d769cc4f-v7fw8" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.242367 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-v7fw8" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.733515 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.736488 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.739917 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.750461 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-gwc9c" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.750680 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.750966 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.751095 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.751213 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.751332 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.759703 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.838339 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.838599 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: 
\"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.838618 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.838696 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.838815 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.838886 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-config-data\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.838914 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.838939 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.838980 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.838998 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8b2vv\" (UniqueName: \"kubernetes.io/projected/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-kube-api-access-8b2vv\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.839014 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " 
pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.965823 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.968660 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8b2vv\" (UniqueName: \"kubernetes.io/projected/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-kube-api-access-8b2vv\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.969137 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.971327 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.971503 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.971743 4840 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.971990 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.972072 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.972150 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.972346 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.972432 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-config-data\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.972601 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.972817 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.973107 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.974581 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-config-data\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.976092 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:36 crc kubenswrapper[4840]: I1205 15:15:36.977357 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.025116 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.035086 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.057477 4840 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.058009 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.068284 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.079102 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.081826 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8b2vv\" (UniqueName: \"kubernetes.io/projected/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-kube-api-access-8b2vv\") pod \"rabbitmq-server-0\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " pod="openstack/rabbitmq-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.088401 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.096914 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-btff4"] Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.098936 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-gf9wj" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.099128 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.099310 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.099429 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.100104 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.100218 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.100312 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.103158 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.112659 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-v7fw8"] Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.283635 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-t7jxj\" (UniqueName: \"kubernetes.io/projected/f169c577-448f-45db-bcdd-f34f5c24e6bb-kube-api-access-t7jxj\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.283687 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f169c577-448f-45db-bcdd-f34f5c24e6bb-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.283711 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f169c577-448f-45db-bcdd-f34f5c24e6bb-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.283747 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f169c577-448f-45db-bcdd-f34f5c24e6bb-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.283780 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f169c577-448f-45db-bcdd-f34f5c24e6bb-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.283819 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f169c577-448f-45db-bcdd-f34f5c24e6bb-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.283890 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f169c577-448f-45db-bcdd-f34f5c24e6bb-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.283909 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f169c577-448f-45db-bcdd-f34f5c24e6bb-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.283933 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f169c577-448f-45db-bcdd-f34f5c24e6bb-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.283990 4840 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.284009 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f169c577-448f-45db-bcdd-f34f5c24e6bb-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.381187 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.385808 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f169c577-448f-45db-bcdd-f34f5c24e6bb-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.385994 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f169c577-448f-45db-bcdd-f34f5c24e6bb-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.386127 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.386277 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f169c577-448f-45db-bcdd-f34f5c24e6bb-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.386303 4840 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.386694 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f169c577-448f-45db-bcdd-f34f5c24e6bb-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.387099 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f169c577-448f-45db-bcdd-f34f5c24e6bb-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.389533 
4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7jxj\" (UniqueName: \"kubernetes.io/projected/f169c577-448f-45db-bcdd-f34f5c24e6bb-kube-api-access-t7jxj\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.402254 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f169c577-448f-45db-bcdd-f34f5c24e6bb-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.402464 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f169c577-448f-45db-bcdd-f34f5c24e6bb-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.402594 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f169c577-448f-45db-bcdd-f34f5c24e6bb-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.402738 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f169c577-448f-45db-bcdd-f34f5c24e6bb-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.402838 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f169c577-448f-45db-bcdd-f34f5c24e6bb-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.402961 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f169c577-448f-45db-bcdd-f34f5c24e6bb-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.392156 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f169c577-448f-45db-bcdd-f34f5c24e6bb-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.403334 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7jxj\" (UniqueName: \"kubernetes.io/projected/f169c577-448f-45db-bcdd-f34f5c24e6bb-kube-api-access-t7jxj\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.403565 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/f169c577-448f-45db-bcdd-f34f5c24e6bb-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.404162 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f169c577-448f-45db-bcdd-f34f5c24e6bb-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.405589 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f169c577-448f-45db-bcdd-f34f5c24e6bb-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.406460 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f169c577-448f-45db-bcdd-f34f5c24e6bb-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.408105 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f169c577-448f-45db-bcdd-f34f5c24e6bb-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.409304 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f169c577-448f-45db-bcdd-f34f5c24e6bb-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.413182 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:37 crc kubenswrapper[4840]: I1205 15:15:37.445939 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.388734 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.390534 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.400639 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.404135 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-hr6qw" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.404317 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.405453 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.405601 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.405890 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.622559 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"b1a586cf-ff08-4975-b172-0167bb10ff77\") " pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.622969 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4r965\" (UniqueName: \"kubernetes.io/projected/b1a586cf-ff08-4975-b172-0167bb10ff77-kube-api-access-4r965\") pod \"openstack-galera-0\" (UID: \"b1a586cf-ff08-4975-b172-0167bb10ff77\") " pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.623076 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1a586cf-ff08-4975-b172-0167bb10ff77-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"b1a586cf-ff08-4975-b172-0167bb10ff77\") " pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.623186 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b1a586cf-ff08-4975-b172-0167bb10ff77-config-data-default\") pod \"openstack-galera-0\" (UID: \"b1a586cf-ff08-4975-b172-0167bb10ff77\") " pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.623312 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b1a586cf-ff08-4975-b172-0167bb10ff77-kolla-config\") pod \"openstack-galera-0\" (UID: \"b1a586cf-ff08-4975-b172-0167bb10ff77\") " pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.623477 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1a586cf-ff08-4975-b172-0167bb10ff77-operator-scripts\") pod \"openstack-galera-0\" (UID: \"b1a586cf-ff08-4975-b172-0167bb10ff77\") " pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.623661 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1a586cf-ff08-4975-b172-0167bb10ff77-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"b1a586cf-ff08-4975-b172-0167bb10ff77\") " pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.623726 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b1a586cf-ff08-4975-b172-0167bb10ff77-config-data-generated\") pod \"openstack-galera-0\" (UID: \"b1a586cf-ff08-4975-b172-0167bb10ff77\") " pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.730800 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b1a586cf-ff08-4975-b172-0167bb10ff77-config-data-default\") pod \"openstack-galera-0\" (UID: \"b1a586cf-ff08-4975-b172-0167bb10ff77\") " pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.730911 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b1a586cf-ff08-4975-b172-0167bb10ff77-config-data-default\") pod \"openstack-galera-0\" (UID: \"b1a586cf-ff08-4975-b172-0167bb10ff77\") " pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.730990 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b1a586cf-ff08-4975-b172-0167bb10ff77-kolla-config\") pod \"openstack-galera-0\" (UID: \"b1a586cf-ff08-4975-b172-0167bb10ff77\") " pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.731063 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1a586cf-ff08-4975-b172-0167bb10ff77-operator-scripts\") pod \"openstack-galera-0\" (UID: \"b1a586cf-ff08-4975-b172-0167bb10ff77\") " pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.731125 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b1a586cf-ff08-4975-b172-0167bb10ff77-config-data-generated\") pod \"openstack-galera-0\" (UID: \"b1a586cf-ff08-4975-b172-0167bb10ff77\") " pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.731169 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1a586cf-ff08-4975-b172-0167bb10ff77-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"b1a586cf-ff08-4975-b172-0167bb10ff77\") " pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.731313 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"b1a586cf-ff08-4975-b172-0167bb10ff77\") " pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.731384 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4r965\" (UniqueName: \"kubernetes.io/projected/b1a586cf-ff08-4975-b172-0167bb10ff77-kube-api-access-4r965\") pod \"openstack-galera-0\" (UID: 
\"b1a586cf-ff08-4975-b172-0167bb10ff77\") " pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.731415 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1a586cf-ff08-4975-b172-0167bb10ff77-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"b1a586cf-ff08-4975-b172-0167bb10ff77\") " pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.731576 4840 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"b1a586cf-ff08-4975-b172-0167bb10ff77\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.731610 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b1a586cf-ff08-4975-b172-0167bb10ff77-config-data-generated\") pod \"openstack-galera-0\" (UID: \"b1a586cf-ff08-4975-b172-0167bb10ff77\") " pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.733444 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1a586cf-ff08-4975-b172-0167bb10ff77-operator-scripts\") pod \"openstack-galera-0\" (UID: \"b1a586cf-ff08-4975-b172-0167bb10ff77\") " pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.734018 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b1a586cf-ff08-4975-b172-0167bb10ff77-kolla-config\") pod \"openstack-galera-0\" (UID: \"b1a586cf-ff08-4975-b172-0167bb10ff77\") " pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.738052 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1a586cf-ff08-4975-b172-0167bb10ff77-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"b1a586cf-ff08-4975-b172-0167bb10ff77\") " pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.741616 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1a586cf-ff08-4975-b172-0167bb10ff77-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"b1a586cf-ff08-4975-b172-0167bb10ff77\") " pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.771406 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4r965\" (UniqueName: \"kubernetes.io/projected/b1a586cf-ff08-4975-b172-0167bb10ff77-kube-api-access-4r965\") pod \"openstack-galera-0\" (UID: \"b1a586cf-ff08-4975-b172-0167bb10ff77\") " pod="openstack/openstack-galera-0" Dec 05 15:15:38 crc kubenswrapper[4840]: I1205 15:15:38.782058 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"b1a586cf-ff08-4975-b172-0167bb10ff77\") " pod="openstack/openstack-galera-0" Dec 05 15:15:39 crc kubenswrapper[4840]: I1205 15:15:39.017148 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.109339 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.110968 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.111056 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.114191 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-t4nqg" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.114537 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.115608 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.117100 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.262788 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28fec705-baad-4e89-94d4-e1e7d64579a1-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"28fec705-baad-4e89-94d4-e1e7d64579a1\") " pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.262850 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28fec705-baad-4e89-94d4-e1e7d64579a1-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"28fec705-baad-4e89-94d4-e1e7d64579a1\") " pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.262897 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/28fec705-baad-4e89-94d4-e1e7d64579a1-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"28fec705-baad-4e89-94d4-e1e7d64579a1\") " pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.262983 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"28fec705-baad-4e89-94d4-e1e7d64579a1\") " pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.263011 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/28fec705-baad-4e89-94d4-e1e7d64579a1-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"28fec705-baad-4e89-94d4-e1e7d64579a1\") " pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.263044 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/28fec705-baad-4e89-94d4-e1e7d64579a1-kolla-config\") pod 
\"openstack-cell1-galera-0\" (UID: \"28fec705-baad-4e89-94d4-e1e7d64579a1\") " pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.263143 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/28fec705-baad-4e89-94d4-e1e7d64579a1-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"28fec705-baad-4e89-94d4-e1e7d64579a1\") " pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.263163 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmjx9\" (UniqueName: \"kubernetes.io/projected/28fec705-baad-4e89-94d4-e1e7d64579a1-kube-api-access-dmjx9\") pod \"openstack-cell1-galera-0\" (UID: \"28fec705-baad-4e89-94d4-e1e7d64579a1\") " pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.368810 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/28fec705-baad-4e89-94d4-e1e7d64579a1-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"28fec705-baad-4e89-94d4-e1e7d64579a1\") " pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.369002 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/28fec705-baad-4e89-94d4-e1e7d64579a1-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"28fec705-baad-4e89-94d4-e1e7d64579a1\") " pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.369029 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmjx9\" (UniqueName: \"kubernetes.io/projected/28fec705-baad-4e89-94d4-e1e7d64579a1-kube-api-access-dmjx9\") pod \"openstack-cell1-galera-0\" (UID: \"28fec705-baad-4e89-94d4-e1e7d64579a1\") " pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.369048 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28fec705-baad-4e89-94d4-e1e7d64579a1-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"28fec705-baad-4e89-94d4-e1e7d64579a1\") " pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.369079 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28fec705-baad-4e89-94d4-e1e7d64579a1-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"28fec705-baad-4e89-94d4-e1e7d64579a1\") " pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.369098 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/28fec705-baad-4e89-94d4-e1e7d64579a1-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"28fec705-baad-4e89-94d4-e1e7d64579a1\") " pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.369134 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: 
\"28fec705-baad-4e89-94d4-e1e7d64579a1\") " pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.369154 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/28fec705-baad-4e89-94d4-e1e7d64579a1-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"28fec705-baad-4e89-94d4-e1e7d64579a1\") " pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.373538 4840 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"28fec705-baad-4e89-94d4-e1e7d64579a1\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.373965 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/28fec705-baad-4e89-94d4-e1e7d64579a1-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"28fec705-baad-4e89-94d4-e1e7d64579a1\") " pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.373573 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/28fec705-baad-4e89-94d4-e1e7d64579a1-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"28fec705-baad-4e89-94d4-e1e7d64579a1\") " pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.374816 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/28fec705-baad-4e89-94d4-e1e7d64579a1-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"28fec705-baad-4e89-94d4-e1e7d64579a1\") " pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.375315 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28fec705-baad-4e89-94d4-e1e7d64579a1-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"28fec705-baad-4e89-94d4-e1e7d64579a1\") " pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.377993 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/28fec705-baad-4e89-94d4-e1e7d64579a1-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"28fec705-baad-4e89-94d4-e1e7d64579a1\") " pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.425896 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28fec705-baad-4e89-94d4-e1e7d64579a1-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"28fec705-baad-4e89-94d4-e1e7d64579a1\") " pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.438081 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmjx9\" (UniqueName: \"kubernetes.io/projected/28fec705-baad-4e89-94d4-e1e7d64579a1-kube-api-access-dmjx9\") pod \"openstack-cell1-galera-0\" (UID: \"28fec705-baad-4e89-94d4-e1e7d64579a1\") " pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc 
kubenswrapper[4840]: I1205 15:15:40.449645 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"28fec705-baad-4e89-94d4-e1e7d64579a1\") " pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.738095 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.739276 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.741104 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.743583 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.743743 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-xmdq6" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.749557 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.763760 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.791380 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd532161-0cce-4e82-b084-45e1569c1575-combined-ca-bundle\") pod \"memcached-0\" (UID: \"bd532161-0cce-4e82-b084-45e1569c1575\") " pod="openstack/memcached-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.791465 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4652\" (UniqueName: \"kubernetes.io/projected/bd532161-0cce-4e82-b084-45e1569c1575-kube-api-access-n4652\") pod \"memcached-0\" (UID: \"bd532161-0cce-4e82-b084-45e1569c1575\") " pod="openstack/memcached-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.791494 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bd532161-0cce-4e82-b084-45e1569c1575-config-data\") pod \"memcached-0\" (UID: \"bd532161-0cce-4e82-b084-45e1569c1575\") " pod="openstack/memcached-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.791534 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd532161-0cce-4e82-b084-45e1569c1575-memcached-tls-certs\") pod \"memcached-0\" (UID: \"bd532161-0cce-4e82-b084-45e1569c1575\") " pod="openstack/memcached-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.791560 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bd532161-0cce-4e82-b084-45e1569c1575-kolla-config\") pod \"memcached-0\" (UID: \"bd532161-0cce-4e82-b084-45e1569c1575\") " pod="openstack/memcached-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.892635 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/bd532161-0cce-4e82-b084-45e1569c1575-combined-ca-bundle\") pod \"memcached-0\" (UID: \"bd532161-0cce-4e82-b084-45e1569c1575\") " pod="openstack/memcached-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.892701 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4652\" (UniqueName: \"kubernetes.io/projected/bd532161-0cce-4e82-b084-45e1569c1575-kube-api-access-n4652\") pod \"memcached-0\" (UID: \"bd532161-0cce-4e82-b084-45e1569c1575\") " pod="openstack/memcached-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.892720 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bd532161-0cce-4e82-b084-45e1569c1575-config-data\") pod \"memcached-0\" (UID: \"bd532161-0cce-4e82-b084-45e1569c1575\") " pod="openstack/memcached-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.892749 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd532161-0cce-4e82-b084-45e1569c1575-memcached-tls-certs\") pod \"memcached-0\" (UID: \"bd532161-0cce-4e82-b084-45e1569c1575\") " pod="openstack/memcached-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.892772 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bd532161-0cce-4e82-b084-45e1569c1575-kolla-config\") pod \"memcached-0\" (UID: \"bd532161-0cce-4e82-b084-45e1569c1575\") " pod="openstack/memcached-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.893601 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bd532161-0cce-4e82-b084-45e1569c1575-kolla-config\") pod \"memcached-0\" (UID: \"bd532161-0cce-4e82-b084-45e1569c1575\") " pod="openstack/memcached-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.893695 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bd532161-0cce-4e82-b084-45e1569c1575-config-data\") pod \"memcached-0\" (UID: \"bd532161-0cce-4e82-b084-45e1569c1575\") " pod="openstack/memcached-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.900323 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd532161-0cce-4e82-b084-45e1569c1575-memcached-tls-certs\") pod \"memcached-0\" (UID: \"bd532161-0cce-4e82-b084-45e1569c1575\") " pod="openstack/memcached-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.900666 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd532161-0cce-4e82-b084-45e1569c1575-combined-ca-bundle\") pod \"memcached-0\" (UID: \"bd532161-0cce-4e82-b084-45e1569c1575\") " pod="openstack/memcached-0" Dec 05 15:15:40 crc kubenswrapper[4840]: I1205 15:15:40.915417 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4652\" (UniqueName: \"kubernetes.io/projected/bd532161-0cce-4e82-b084-45e1569c1575-kube-api-access-n4652\") pod \"memcached-0\" (UID: \"bd532161-0cce-4e82-b084-45e1569c1575\") " pod="openstack/memcached-0" Dec 05 15:15:41 crc kubenswrapper[4840]: I1205 15:15:41.188915 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Dec 05 15:15:42 crc kubenswrapper[4840]: I1205 15:15:42.309029 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 15:15:42 crc kubenswrapper[4840]: I1205 15:15:42.310294 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 15:15:42 crc kubenswrapper[4840]: I1205 15:15:42.313311 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-fphdj" Dec 05 15:15:42 crc kubenswrapper[4840]: I1205 15:15:42.316532 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8wwt\" (UniqueName: \"kubernetes.io/projected/2d91fd7f-f657-458d-9f80-3d915e5fa97d-kube-api-access-n8wwt\") pod \"kube-state-metrics-0\" (UID: \"2d91fd7f-f657-458d-9f80-3d915e5fa97d\") " pod="openstack/kube-state-metrics-0" Dec 05 15:15:42 crc kubenswrapper[4840]: I1205 15:15:42.321079 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 15:15:42 crc kubenswrapper[4840]: I1205 15:15:42.417649 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8wwt\" (UniqueName: \"kubernetes.io/projected/2d91fd7f-f657-458d-9f80-3d915e5fa97d-kube-api-access-n8wwt\") pod \"kube-state-metrics-0\" (UID: \"2d91fd7f-f657-458d-9f80-3d915e5fa97d\") " pod="openstack/kube-state-metrics-0" Dec 05 15:15:42 crc kubenswrapper[4840]: I1205 15:15:42.444965 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8wwt\" (UniqueName: \"kubernetes.io/projected/2d91fd7f-f657-458d-9f80-3d915e5fa97d-kube-api-access-n8wwt\") pod \"kube-state-metrics-0\" (UID: \"2d91fd7f-f657-458d-9f80-3d915e5fa97d\") " pod="openstack/kube-state-metrics-0" Dec 05 15:15:42 crc kubenswrapper[4840]: I1205 15:15:42.671625 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 15:15:43 crc kubenswrapper[4840]: W1205 15:15:43.173173 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd4640d84_ab2c_41ae_b84b_461d096d1e28.slice/crio-a0408ed164080bd88ae22dc1e05a4c1922c38b17ab64e831eab6b25a9bd9dcdb WatchSource:0}: Error finding container a0408ed164080bd88ae22dc1e05a4c1922c38b17ab64e831eab6b25a9bd9dcdb: Status 404 returned error can't find the container with id a0408ed164080bd88ae22dc1e05a4c1922c38b17ab64e831eab6b25a9bd9dcdb Dec 05 15:15:43 crc kubenswrapper[4840]: W1205 15:15:43.174511 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod56638051_dc23_40f8_b1f3_01d17d4a87e9.slice/crio-235772afffc1ea5413d0fdd02e837dca1073bc6dcd44398327b3c17d37ccf88d WatchSource:0}: Error finding container 235772afffc1ea5413d0fdd02e837dca1073bc6dcd44398327b3c17d37ccf88d: Status 404 returned error can't find the container with id 235772afffc1ea5413d0fdd02e837dca1073bc6dcd44398327b3c17d37ccf88d Dec 05 15:15:43 crc kubenswrapper[4840]: I1205 15:15:43.696743 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-v7fw8" event={"ID":"d4640d84-ab2c-41ae-b84b-461d096d1e28","Type":"ContainerStarted","Data":"a0408ed164080bd88ae22dc1e05a4c1922c38b17ab64e831eab6b25a9bd9dcdb"} Dec 05 15:15:43 crc kubenswrapper[4840]: I1205 15:15:43.697988 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-btff4" event={"ID":"56638051-dc23-40f8-b1f3-01d17d4a87e9","Type":"ContainerStarted","Data":"235772afffc1ea5413d0fdd02e837dca1073bc6dcd44398327b3c17d37ccf88d"} Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.009318 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-cttvn"] Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.010690 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-cttvn" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.030456 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.030915 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-w9tjk" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.030823 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.045190 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-rnf5z"] Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.048182 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-rnf5z" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.105295 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-cttvn"] Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.129110 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-rnf5z"] Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.216106 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ab69b71e-d666-46a7-a896-96a70fff685a-scripts\") pod \"ovn-controller-cttvn\" (UID: \"ab69b71e-d666-46a7-a896-96a70fff685a\") " pod="openstack/ovn-controller-cttvn" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.216155 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f75e819c-db65-4ab4-8530-1390b8a83dd0-var-log\") pod \"ovn-controller-ovs-rnf5z\" (UID: \"f75e819c-db65-4ab4-8530-1390b8a83dd0\") " pod="openstack/ovn-controller-ovs-rnf5z" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.216276 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab69b71e-d666-46a7-a896-96a70fff685a-ovn-controller-tls-certs\") pod \"ovn-controller-cttvn\" (UID: \"ab69b71e-d666-46a7-a896-96a70fff685a\") " pod="openstack/ovn-controller-cttvn" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.216326 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ab69b71e-d666-46a7-a896-96a70fff685a-var-log-ovn\") pod \"ovn-controller-cttvn\" (UID: \"ab69b71e-d666-46a7-a896-96a70fff685a\") " pod="openstack/ovn-controller-cttvn" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.216365 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ab69b71e-d666-46a7-a896-96a70fff685a-var-run-ovn\") pod \"ovn-controller-cttvn\" (UID: \"ab69b71e-d666-46a7-a896-96a70fff685a\") " pod="openstack/ovn-controller-cttvn" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.216469 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab69b71e-d666-46a7-a896-96a70fff685a-combined-ca-bundle\") pod \"ovn-controller-cttvn\" (UID: \"ab69b71e-d666-46a7-a896-96a70fff685a\") " pod="openstack/ovn-controller-cttvn" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.216498 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbrkq\" (UniqueName: \"kubernetes.io/projected/ab69b71e-d666-46a7-a896-96a70fff685a-kube-api-access-zbrkq\") pod \"ovn-controller-cttvn\" (UID: \"ab69b71e-d666-46a7-a896-96a70fff685a\") " pod="openstack/ovn-controller-cttvn" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.216524 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ab69b71e-d666-46a7-a896-96a70fff685a-var-run\") pod \"ovn-controller-cttvn\" (UID: \"ab69b71e-d666-46a7-a896-96a70fff685a\") " pod="openstack/ovn-controller-cttvn" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 
15:15:46.216614 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/f75e819c-db65-4ab4-8530-1390b8a83dd0-var-lib\") pod \"ovn-controller-ovs-rnf5z\" (UID: \"f75e819c-db65-4ab4-8530-1390b8a83dd0\") " pod="openstack/ovn-controller-ovs-rnf5z" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.216642 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/f75e819c-db65-4ab4-8530-1390b8a83dd0-etc-ovs\") pod \"ovn-controller-ovs-rnf5z\" (UID: \"f75e819c-db65-4ab4-8530-1390b8a83dd0\") " pod="openstack/ovn-controller-ovs-rnf5z" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.216656 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f75e819c-db65-4ab4-8530-1390b8a83dd0-var-run\") pod \"ovn-controller-ovs-rnf5z\" (UID: \"f75e819c-db65-4ab4-8530-1390b8a83dd0\") " pod="openstack/ovn-controller-ovs-rnf5z" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.216688 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wncjf\" (UniqueName: \"kubernetes.io/projected/f75e819c-db65-4ab4-8530-1390b8a83dd0-kube-api-access-wncjf\") pod \"ovn-controller-ovs-rnf5z\" (UID: \"f75e819c-db65-4ab4-8530-1390b8a83dd0\") " pod="openstack/ovn-controller-ovs-rnf5z" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.216797 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f75e819c-db65-4ab4-8530-1390b8a83dd0-scripts\") pod \"ovn-controller-ovs-rnf5z\" (UID: \"f75e819c-db65-4ab4-8530-1390b8a83dd0\") " pod="openstack/ovn-controller-ovs-rnf5z" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.317518 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ab69b71e-d666-46a7-a896-96a70fff685a-var-log-ovn\") pod \"ovn-controller-cttvn\" (UID: \"ab69b71e-d666-46a7-a896-96a70fff685a\") " pod="openstack/ovn-controller-cttvn" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.317570 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ab69b71e-d666-46a7-a896-96a70fff685a-var-run-ovn\") pod \"ovn-controller-cttvn\" (UID: \"ab69b71e-d666-46a7-a896-96a70fff685a\") " pod="openstack/ovn-controller-cttvn" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.317614 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab69b71e-d666-46a7-a896-96a70fff685a-combined-ca-bundle\") pod \"ovn-controller-cttvn\" (UID: \"ab69b71e-d666-46a7-a896-96a70fff685a\") " pod="openstack/ovn-controller-cttvn" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.317640 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbrkq\" (UniqueName: \"kubernetes.io/projected/ab69b71e-d666-46a7-a896-96a70fff685a-kube-api-access-zbrkq\") pod \"ovn-controller-cttvn\" (UID: \"ab69b71e-d666-46a7-a896-96a70fff685a\") " pod="openstack/ovn-controller-cttvn" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.317679 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"var-run\" (UniqueName: \"kubernetes.io/host-path/ab69b71e-d666-46a7-a896-96a70fff685a-var-run\") pod \"ovn-controller-cttvn\" (UID: \"ab69b71e-d666-46a7-a896-96a70fff685a\") " pod="openstack/ovn-controller-cttvn" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.317723 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/f75e819c-db65-4ab4-8530-1390b8a83dd0-var-lib\") pod \"ovn-controller-ovs-rnf5z\" (UID: \"f75e819c-db65-4ab4-8530-1390b8a83dd0\") " pod="openstack/ovn-controller-ovs-rnf5z" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.317776 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/f75e819c-db65-4ab4-8530-1390b8a83dd0-etc-ovs\") pod \"ovn-controller-ovs-rnf5z\" (UID: \"f75e819c-db65-4ab4-8530-1390b8a83dd0\") " pod="openstack/ovn-controller-ovs-rnf5z" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.317794 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f75e819c-db65-4ab4-8530-1390b8a83dd0-var-run\") pod \"ovn-controller-ovs-rnf5z\" (UID: \"f75e819c-db65-4ab4-8530-1390b8a83dd0\") " pod="openstack/ovn-controller-ovs-rnf5z" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.318495 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ab69b71e-d666-46a7-a896-96a70fff685a-var-run\") pod \"ovn-controller-cttvn\" (UID: \"ab69b71e-d666-46a7-a896-96a70fff685a\") " pod="openstack/ovn-controller-cttvn" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.318533 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f75e819c-db65-4ab4-8530-1390b8a83dd0-var-run\") pod \"ovn-controller-ovs-rnf5z\" (UID: \"f75e819c-db65-4ab4-8530-1390b8a83dd0\") " pod="openstack/ovn-controller-ovs-rnf5z" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.318621 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ab69b71e-d666-46a7-a896-96a70fff685a-var-log-ovn\") pod \"ovn-controller-cttvn\" (UID: \"ab69b71e-d666-46a7-a896-96a70fff685a\") " pod="openstack/ovn-controller-cttvn" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.318636 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/f75e819c-db65-4ab4-8530-1390b8a83dd0-etc-ovs\") pod \"ovn-controller-ovs-rnf5z\" (UID: \"f75e819c-db65-4ab4-8530-1390b8a83dd0\") " pod="openstack/ovn-controller-ovs-rnf5z" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.318688 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/f75e819c-db65-4ab4-8530-1390b8a83dd0-var-lib\") pod \"ovn-controller-ovs-rnf5z\" (UID: \"f75e819c-db65-4ab4-8530-1390b8a83dd0\") " pod="openstack/ovn-controller-ovs-rnf5z" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.318779 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wncjf\" (UniqueName: \"kubernetes.io/projected/f75e819c-db65-4ab4-8530-1390b8a83dd0-kube-api-access-wncjf\") pod \"ovn-controller-ovs-rnf5z\" (UID: \"f75e819c-db65-4ab4-8530-1390b8a83dd0\") " pod="openstack/ovn-controller-ovs-rnf5z" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 
15:15:46.318788 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ab69b71e-d666-46a7-a896-96a70fff685a-var-run-ovn\") pod \"ovn-controller-cttvn\" (UID: \"ab69b71e-d666-46a7-a896-96a70fff685a\") " pod="openstack/ovn-controller-cttvn" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.318904 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f75e819c-db65-4ab4-8530-1390b8a83dd0-scripts\") pod \"ovn-controller-ovs-rnf5z\" (UID: \"f75e819c-db65-4ab4-8530-1390b8a83dd0\") " pod="openstack/ovn-controller-ovs-rnf5z" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.318938 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ab69b71e-d666-46a7-a896-96a70fff685a-scripts\") pod \"ovn-controller-cttvn\" (UID: \"ab69b71e-d666-46a7-a896-96a70fff685a\") " pod="openstack/ovn-controller-cttvn" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.318962 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f75e819c-db65-4ab4-8530-1390b8a83dd0-var-log\") pod \"ovn-controller-ovs-rnf5z\" (UID: \"f75e819c-db65-4ab4-8530-1390b8a83dd0\") " pod="openstack/ovn-controller-ovs-rnf5z" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.319007 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab69b71e-d666-46a7-a896-96a70fff685a-ovn-controller-tls-certs\") pod \"ovn-controller-cttvn\" (UID: \"ab69b71e-d666-46a7-a896-96a70fff685a\") " pod="openstack/ovn-controller-cttvn" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.321266 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ab69b71e-d666-46a7-a896-96a70fff685a-scripts\") pod \"ovn-controller-cttvn\" (UID: \"ab69b71e-d666-46a7-a896-96a70fff685a\") " pod="openstack/ovn-controller-cttvn" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.321413 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f75e819c-db65-4ab4-8530-1390b8a83dd0-var-log\") pod \"ovn-controller-ovs-rnf5z\" (UID: \"f75e819c-db65-4ab4-8530-1390b8a83dd0\") " pod="openstack/ovn-controller-ovs-rnf5z" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.329437 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab69b71e-d666-46a7-a896-96a70fff685a-combined-ca-bundle\") pod \"ovn-controller-cttvn\" (UID: \"ab69b71e-d666-46a7-a896-96a70fff685a\") " pod="openstack/ovn-controller-cttvn" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.333751 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f75e819c-db65-4ab4-8530-1390b8a83dd0-scripts\") pod \"ovn-controller-ovs-rnf5z\" (UID: \"f75e819c-db65-4ab4-8530-1390b8a83dd0\") " pod="openstack/ovn-controller-ovs-rnf5z" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.336309 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbrkq\" (UniqueName: \"kubernetes.io/projected/ab69b71e-d666-46a7-a896-96a70fff685a-kube-api-access-zbrkq\") pod \"ovn-controller-cttvn\" (UID: 
\"ab69b71e-d666-46a7-a896-96a70fff685a\") " pod="openstack/ovn-controller-cttvn" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.336936 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab69b71e-d666-46a7-a896-96a70fff685a-ovn-controller-tls-certs\") pod \"ovn-controller-cttvn\" (UID: \"ab69b71e-d666-46a7-a896-96a70fff685a\") " pod="openstack/ovn-controller-cttvn" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.371535 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-cttvn" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.377686 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wncjf\" (UniqueName: \"kubernetes.io/projected/f75e819c-db65-4ab4-8530-1390b8a83dd0-kube-api-access-wncjf\") pod \"ovn-controller-ovs-rnf5z\" (UID: \"f75e819c-db65-4ab4-8530-1390b8a83dd0\") " pod="openstack/ovn-controller-ovs-rnf5z" Dec 05 15:15:46 crc kubenswrapper[4840]: I1205 15:15:46.396553 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-rnf5z" Dec 05 15:15:47 crc kubenswrapper[4840]: I1205 15:15:47.544440 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 15:15:47 crc kubenswrapper[4840]: I1205 15:15:47.545894 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:47 crc kubenswrapper[4840]: I1205 15:15:47.557039 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 05 15:15:47 crc kubenswrapper[4840]: I1205 15:15:47.557284 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 05 15:15:47 crc kubenswrapper[4840]: I1205 15:15:47.557459 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 05 15:15:47 crc kubenswrapper[4840]: I1205 15:15:47.561507 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-kmlwn" Dec 05 15:15:47 crc kubenswrapper[4840]: I1205 15:15:47.561922 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 05 15:15:47 crc kubenswrapper[4840]: I1205 15:15:47.589481 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 15:15:47 crc kubenswrapper[4840]: I1205 15:15:47.771056 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnh2x\" (UniqueName: \"kubernetes.io/projected/0fdc90f7-3261-4c8f-860b-c5f3890d3470-kube-api-access-xnh2x\") pod \"ovsdbserver-nb-0\" (UID: \"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") " pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:47 crc kubenswrapper[4840]: I1205 15:15:47.771104 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0fdc90f7-3261-4c8f-860b-c5f3890d3470-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") " pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:47 crc kubenswrapper[4840]: I1205 15:15:47.771153 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/0fdc90f7-3261-4c8f-860b-c5f3890d3470-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") " pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:47 crc kubenswrapper[4840]: I1205 15:15:47.771178 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fdc90f7-3261-4c8f-860b-c5f3890d3470-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") " pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:47 crc kubenswrapper[4840]: I1205 15:15:47.771252 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0fdc90f7-3261-4c8f-860b-c5f3890d3470-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") " pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:47 crc kubenswrapper[4840]: I1205 15:15:47.771282 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fdc90f7-3261-4c8f-860b-c5f3890d3470-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") " pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:47 crc kubenswrapper[4840]: I1205 15:15:47.771347 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-nb-0\" (UID: \"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") " pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:47 crc kubenswrapper[4840]: I1205 15:15:47.771377 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0fdc90f7-3261-4c8f-860b-c5f3890d3470-config\") pod \"ovsdbserver-nb-0\" (UID: \"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") " pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:47 crc kubenswrapper[4840]: I1205 15:15:47.874018 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0fdc90f7-3261-4c8f-860b-c5f3890d3470-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") " pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:47 crc kubenswrapper[4840]: I1205 15:15:47.874525 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fdc90f7-3261-4c8f-860b-c5f3890d3470-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") " pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:47 crc kubenswrapper[4840]: I1205 15:15:47.874744 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-nb-0\" (UID: \"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") " pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:48 crc kubenswrapper[4840]: I1205 15:15:47.877034 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0fdc90f7-3261-4c8f-860b-c5f3890d3470-config\") pod \"ovsdbserver-nb-0\" (UID: \"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") " pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:48 crc 
kubenswrapper[4840]: I1205 15:15:47.877261 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnh2x\" (UniqueName: \"kubernetes.io/projected/0fdc90f7-3261-4c8f-860b-c5f3890d3470-kube-api-access-xnh2x\") pod \"ovsdbserver-nb-0\" (UID: \"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") " pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:48 crc kubenswrapper[4840]: I1205 15:15:47.877327 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0fdc90f7-3261-4c8f-860b-c5f3890d3470-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") " pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:48 crc kubenswrapper[4840]: I1205 15:15:47.877469 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fdc90f7-3261-4c8f-860b-c5f3890d3470-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") " pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:48 crc kubenswrapper[4840]: I1205 15:15:47.877546 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fdc90f7-3261-4c8f-860b-c5f3890d3470-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") " pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:48 crc kubenswrapper[4840]: I1205 15:15:48.012684 4840 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-nb-0\" (UID: \"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:48 crc kubenswrapper[4840]: I1205 15:15:48.094808 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0fdc90f7-3261-4c8f-860b-c5f3890d3470-config\") pod \"ovsdbserver-nb-0\" (UID: \"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") " pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:48 crc kubenswrapper[4840]: I1205 15:15:48.094881 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0fdc90f7-3261-4c8f-860b-c5f3890d3470-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") " pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:48 crc kubenswrapper[4840]: I1205 15:15:48.098893 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0fdc90f7-3261-4c8f-860b-c5f3890d3470-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") " pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:48 crc kubenswrapper[4840]: I1205 15:15:48.100111 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fdc90f7-3261-4c8f-860b-c5f3890d3470-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") " pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:48 crc kubenswrapper[4840]: I1205 15:15:48.103322 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnh2x\" (UniqueName: \"kubernetes.io/projected/0fdc90f7-3261-4c8f-860b-c5f3890d3470-kube-api-access-xnh2x\") pod \"ovsdbserver-nb-0\" (UID: 
\"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") " pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:48 crc kubenswrapper[4840]: I1205 15:15:48.107234 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-nb-0\" (UID: \"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") " pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:48 crc kubenswrapper[4840]: I1205 15:15:48.113043 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fdc90f7-3261-4c8f-860b-c5f3890d3470-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") " pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:48 crc kubenswrapper[4840]: I1205 15:15:48.113285 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fdc90f7-3261-4c8f-860b-c5f3890d3470-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0fdc90f7-3261-4c8f-860b-c5f3890d3470\") " pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:48 crc kubenswrapper[4840]: I1205 15:15:48.195179 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.423726 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.425627 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.427820 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.433907 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.434096 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-6gh54" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.434281 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.451513 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.458947 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.590192 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7548669d-ea2b-4442-b4b6-f3408d636798-config\") pod \"ovsdbserver-sb-0\" (UID: \"7548669d-ea2b-4442-b4b6-f3408d636798\") " pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.590279 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7548669d-ea2b-4442-b4b6-f3408d636798-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"7548669d-ea2b-4442-b4b6-f3408d636798\") " pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.590320 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-sb-0\" (UID: \"7548669d-ea2b-4442-b4b6-f3408d636798\") " pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.590388 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7548669d-ea2b-4442-b4b6-f3408d636798-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7548669d-ea2b-4442-b4b6-f3408d636798\") " pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.590410 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7548669d-ea2b-4442-b4b6-f3408d636798-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"7548669d-ea2b-4442-b4b6-f3408d636798\") " pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.590438 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffp8z\" (UniqueName: \"kubernetes.io/projected/7548669d-ea2b-4442-b4b6-f3408d636798-kube-api-access-ffp8z\") pod \"ovsdbserver-sb-0\" (UID: \"7548669d-ea2b-4442-b4b6-f3408d636798\") " pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.590627 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7548669d-ea2b-4442-b4b6-f3408d636798-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7548669d-ea2b-4442-b4b6-f3408d636798\") " pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.590726 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7548669d-ea2b-4442-b4b6-f3408d636798-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"7548669d-ea2b-4442-b4b6-f3408d636798\") " pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.692529 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7548669d-ea2b-4442-b4b6-f3408d636798-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"7548669d-ea2b-4442-b4b6-f3408d636798\") " pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.692602 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-sb-0\" (UID: \"7548669d-ea2b-4442-b4b6-f3408d636798\") " pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.692626 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7548669d-ea2b-4442-b4b6-f3408d636798-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7548669d-ea2b-4442-b4b6-f3408d636798\") " pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.692650 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7548669d-ea2b-4442-b4b6-f3408d636798-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: 
\"7548669d-ea2b-4442-b4b6-f3408d636798\") " pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.692675 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffp8z\" (UniqueName: \"kubernetes.io/projected/7548669d-ea2b-4442-b4b6-f3408d636798-kube-api-access-ffp8z\") pod \"ovsdbserver-sb-0\" (UID: \"7548669d-ea2b-4442-b4b6-f3408d636798\") " pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.692715 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7548669d-ea2b-4442-b4b6-f3408d636798-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7548669d-ea2b-4442-b4b6-f3408d636798\") " pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.692762 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7548669d-ea2b-4442-b4b6-f3408d636798-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"7548669d-ea2b-4442-b4b6-f3408d636798\") " pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.692803 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7548669d-ea2b-4442-b4b6-f3408d636798-config\") pod \"ovsdbserver-sb-0\" (UID: \"7548669d-ea2b-4442-b4b6-f3408d636798\") " pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.693239 4840 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-sb-0\" (UID: \"7548669d-ea2b-4442-b4b6-f3408d636798\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.693902 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7548669d-ea2b-4442-b4b6-f3408d636798-config\") pod \"ovsdbserver-sb-0\" (UID: \"7548669d-ea2b-4442-b4b6-f3408d636798\") " pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.694429 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7548669d-ea2b-4442-b4b6-f3408d636798-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"7548669d-ea2b-4442-b4b6-f3408d636798\") " pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.712672 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7548669d-ea2b-4442-b4b6-f3408d636798-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"7548669d-ea2b-4442-b4b6-f3408d636798\") " pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.714110 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7548669d-ea2b-4442-b4b6-f3408d636798-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7548669d-ea2b-4442-b4b6-f3408d636798\") " pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.715116 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/7548669d-ea2b-4442-b4b6-f3408d636798-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"7548669d-ea2b-4442-b4b6-f3408d636798\") " pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.719298 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7548669d-ea2b-4442-b4b6-f3408d636798-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7548669d-ea2b-4442-b4b6-f3408d636798\") " pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.721542 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffp8z\" (UniqueName: \"kubernetes.io/projected/7548669d-ea2b-4442-b4b6-f3408d636798-kube-api-access-ffp8z\") pod \"ovsdbserver-sb-0\" (UID: \"7548669d-ea2b-4442-b4b6-f3408d636798\") " pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.734178 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-sb-0\" (UID: \"7548669d-ea2b-4442-b4b6-f3408d636798\") " pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:49 crc kubenswrapper[4840]: I1205 15:15:49.747906 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 15:15:55 crc kubenswrapper[4840]: I1205 15:15:55.979793 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e8fc49c1-0820-4dcb-9a50-9d3504b768d9","Type":"ContainerStarted","Data":"75cf0e11f87e9cb61b90271e5611fcf637b08e90fd20b5a8db321a142bbccd87"} Dec 05 15:15:56 crc kubenswrapper[4840]: I1205 15:15:56.548932 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 15:15:57 crc kubenswrapper[4840]: E1205 15:15:57.151623 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 15:15:57 crc kubenswrapper[4840]: E1205 15:15:57.152033 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2z44x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-d5ll6_openstack(299512bd-f667-4602-8ec2-307ecdcbabc4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:15:57 crc kubenswrapper[4840]: E1205 15:15:57.153464 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-d5ll6" podUID="299512bd-f667-4602-8ec2-307ecdcbabc4" Dec 05 15:15:57 crc kubenswrapper[4840]: E1205 15:15:57.188361 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 15:15:57 crc kubenswrapper[4840]: E1205 15:15:57.188484 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rz4s6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-brnf4_openstack(71997911-0710-4c53-b9ce-fe9673740c28): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:15:57 crc kubenswrapper[4840]: E1205 15:15:57.189654 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-brnf4" podUID="71997911-0710-4c53-b9ce-fe9673740c28" Dec 05 15:15:57 crc kubenswrapper[4840]: I1205 15:15:57.556545 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 15:15:57 crc kubenswrapper[4840]: I1205 15:15:57.563144 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 15:15:57 crc kubenswrapper[4840]: W1205 15:15:57.615849 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf169c577_448f_45db_bcdd_f34f5c24e6bb.slice/crio-74c93ee1ddd798004ea65f0b10f368feb2fc5b9e1620af9c0ff331d6b4e36000 WatchSource:0}: Error finding container 74c93ee1ddd798004ea65f0b10f368feb2fc5b9e1620af9c0ff331d6b4e36000: Status 404 returned error can't find the container with id 74c93ee1ddd798004ea65f0b10f368feb2fc5b9e1620af9c0ff331d6b4e36000 Dec 05 15:15:57 crc kubenswrapper[4840]: I1205 15:15:57.820721 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 15:15:57 crc kubenswrapper[4840]: W1205 15:15:57.867139 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d91fd7f_f657_458d_9f80_3d915e5fa97d.slice/crio-55a87ef9ec0a0b8720b5a9e975dd339d1fdb2a0e551083319bd22caafdf1cf1f WatchSource:0}: Error finding container 55a87ef9ec0a0b8720b5a9e975dd339d1fdb2a0e551083319bd22caafdf1cf1f: Status 404 returned error can't 
find the container with id 55a87ef9ec0a0b8720b5a9e975dd339d1fdb2a0e551083319bd22caafdf1cf1f Dec 05 15:15:57 crc kubenswrapper[4840]: I1205 15:15:57.994359 4840 generic.go:334] "Generic (PLEG): container finished" podID="56638051-dc23-40f8-b1f3-01d17d4a87e9" containerID="885a721a096d0a6f16b2f979ede5a75c2dc24d24ea9a4584870baa4d5c441614" exitCode=0 Dec 05 15:15:57 crc kubenswrapper[4840]: I1205 15:15:57.994456 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-btff4" event={"ID":"56638051-dc23-40f8-b1f3-01d17d4a87e9","Type":"ContainerDied","Data":"885a721a096d0a6f16b2f979ede5a75c2dc24d24ea9a4584870baa4d5c441614"} Dec 05 15:15:57 crc kubenswrapper[4840]: I1205 15:15:57.997608 4840 generic.go:334] "Generic (PLEG): container finished" podID="d4640d84-ab2c-41ae-b84b-461d096d1e28" containerID="f90976bc793925c111ba3387e479687cc551c6e2dbfbb16ea67bbbda03235f1a" exitCode=0 Dec 05 15:15:57 crc kubenswrapper[4840]: I1205 15:15:57.997678 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-v7fw8" event={"ID":"d4640d84-ab2c-41ae-b84b-461d096d1e28","Type":"ContainerDied","Data":"f90976bc793925c111ba3387e479687cc551c6e2dbfbb16ea67bbbda03235f1a"} Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.009548 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"2d91fd7f-f657-458d-9f80-3d915e5fa97d","Type":"ContainerStarted","Data":"55a87ef9ec0a0b8720b5a9e975dd339d1fdb2a0e551083319bd22caafdf1cf1f"} Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.011018 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"28fec705-baad-4e89-94d4-e1e7d64579a1","Type":"ContainerStarted","Data":"2d3bb39bd6f8eed04c63dcb44dcf4a92d7e8b2a67928b558c779783d737409fa"} Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.019111 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f169c577-448f-45db-bcdd-f34f5c24e6bb","Type":"ContainerStarted","Data":"74c93ee1ddd798004ea65f0b10f368feb2fc5b9e1620af9c0ff331d6b4e36000"} Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.020259 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"bd532161-0cce-4e82-b084-45e1569c1575","Type":"ContainerStarted","Data":"8b1b38110a132274497ae42c2f08199780a6cfa50fbb77bcbaf0ea70e2bc2827"} Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.139967 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.175432 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 15:15:58 crc kubenswrapper[4840]: W1205 15:15:58.192146 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab69b71e_d666_46a7_a896_96a70fff685a.slice/crio-81c61e74eaf432740a5ced7773903ece9155096817dee8be369f3814cc267add WatchSource:0}: Error finding container 81c61e74eaf432740a5ced7773903ece9155096817dee8be369f3814cc267add: Status 404 returned error can't find the container with id 81c61e74eaf432740a5ced7773903ece9155096817dee8be369f3814cc267add Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.197799 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-cttvn"] Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.213752 4840 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openstack/ovn-controller-ovs-rnf5z"] Dec 05 15:15:58 crc kubenswrapper[4840]: E1205 15:15:58.429033 4840 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Dec 05 15:15:58 crc kubenswrapper[4840]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/56638051-dc23-40f8-b1f3-01d17d4a87e9/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 05 15:15:58 crc kubenswrapper[4840]: > podSandboxID="235772afffc1ea5413d0fdd02e837dca1073bc6dcd44398327b3c17d37ccf88d" Dec 05 15:15:58 crc kubenswrapper[4840]: E1205 15:15:58.429462 4840 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 05 15:15:58 crc kubenswrapper[4840]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qkx2x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-btff4_openstack(56638051-dc23-40f8-b1f3-01d17d4a87e9): CreateContainerError: container create failed: mount 
`/var/lib/kubelet/pods/56638051-dc23-40f8-b1f3-01d17d4a87e9/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 05 15:15:58 crc kubenswrapper[4840]: > logger="UnhandledError" Dec 05 15:15:58 crc kubenswrapper[4840]: E1205 15:15:58.430738 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/56638051-dc23-40f8-b1f3-01d17d4a87e9/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-666b6646f7-btff4" podUID="56638051-dc23-40f8-b1f3-01d17d4a87e9" Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.624603 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-brnf4" Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.633377 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-d5ll6" Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.751743 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.763348 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/299512bd-f667-4602-8ec2-307ecdcbabc4-dns-svc\") pod \"299512bd-f667-4602-8ec2-307ecdcbabc4\" (UID: \"299512bd-f667-4602-8ec2-307ecdcbabc4\") " Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.763482 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71997911-0710-4c53-b9ce-fe9673740c28-config\") pod \"71997911-0710-4c53-b9ce-fe9673740c28\" (UID: \"71997911-0710-4c53-b9ce-fe9673740c28\") " Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.763566 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2z44x\" (UniqueName: \"kubernetes.io/projected/299512bd-f667-4602-8ec2-307ecdcbabc4-kube-api-access-2z44x\") pod \"299512bd-f667-4602-8ec2-307ecdcbabc4\" (UID: \"299512bd-f667-4602-8ec2-307ecdcbabc4\") " Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.763936 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rz4s6\" (UniqueName: \"kubernetes.io/projected/71997911-0710-4c53-b9ce-fe9673740c28-kube-api-access-rz4s6\") pod \"71997911-0710-4c53-b9ce-fe9673740c28\" (UID: \"71997911-0710-4c53-b9ce-fe9673740c28\") " Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.763963 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/299512bd-f667-4602-8ec2-307ecdcbabc4-config\") pod \"299512bd-f667-4602-8ec2-307ecdcbabc4\" (UID: \"299512bd-f667-4602-8ec2-307ecdcbabc4\") " Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.764332 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/299512bd-f667-4602-8ec2-307ecdcbabc4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "299512bd-f667-4602-8ec2-307ecdcbabc4" (UID: "299512bd-f667-4602-8ec2-307ecdcbabc4"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.764341 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71997911-0710-4c53-b9ce-fe9673740c28-config" (OuterVolumeSpecName: "config") pod "71997911-0710-4c53-b9ce-fe9673740c28" (UID: "71997911-0710-4c53-b9ce-fe9673740c28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.765063 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/299512bd-f667-4602-8ec2-307ecdcbabc4-config" (OuterVolumeSpecName: "config") pod "299512bd-f667-4602-8ec2-307ecdcbabc4" (UID: "299512bd-f667-4602-8ec2-307ecdcbabc4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.775239 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71997911-0710-4c53-b9ce-fe9673740c28-kube-api-access-rz4s6" (OuterVolumeSpecName: "kube-api-access-rz4s6") pod "71997911-0710-4c53-b9ce-fe9673740c28" (UID: "71997911-0710-4c53-b9ce-fe9673740c28"). InnerVolumeSpecName "kube-api-access-rz4s6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.775307 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/299512bd-f667-4602-8ec2-307ecdcbabc4-kube-api-access-2z44x" (OuterVolumeSpecName: "kube-api-access-2z44x") pod "299512bd-f667-4602-8ec2-307ecdcbabc4" (UID: "299512bd-f667-4602-8ec2-307ecdcbabc4"). InnerVolumeSpecName "kube-api-access-2z44x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.998638 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rz4s6\" (UniqueName: \"kubernetes.io/projected/71997911-0710-4c53-b9ce-fe9673740c28-kube-api-access-rz4s6\") on node \"crc\" DevicePath \"\"" Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.998686 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/299512bd-f667-4602-8ec2-307ecdcbabc4-config\") on node \"crc\" DevicePath \"\"" Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.998700 4840 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/299512bd-f667-4602-8ec2-307ecdcbabc4-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.998710 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71997911-0710-4c53-b9ce-fe9673740c28-config\") on node \"crc\" DevicePath \"\"" Dec 05 15:15:58 crc kubenswrapper[4840]: I1205 15:15:58.998722 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2z44x\" (UniqueName: \"kubernetes.io/projected/299512bd-f667-4602-8ec2-307ecdcbabc4-kube-api-access-2z44x\") on node \"crc\" DevicePath \"\"" Dec 05 15:15:59 crc kubenswrapper[4840]: I1205 15:15:59.031703 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-v7fw8" event={"ID":"d4640d84-ab2c-41ae-b84b-461d096d1e28","Type":"ContainerStarted","Data":"be6a2ccf08ba18c7ebb70ab7921803bd581de2ad924024c6152d7aa71ba1c490"} Dec 05 15:15:59 crc kubenswrapper[4840]: I1205 15:15:59.032091 4840 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-v7fw8" Dec 05 15:15:59 crc kubenswrapper[4840]: I1205 15:15:59.035803 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-d5ll6" event={"ID":"299512bd-f667-4602-8ec2-307ecdcbabc4","Type":"ContainerDied","Data":"5bdaebe222f11b7bf646bd29ceeede22b0dd1c95a24b4ee040bcac489df9d6fb"} Dec 05 15:15:59 crc kubenswrapper[4840]: I1205 15:15:59.035817 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-d5ll6" Dec 05 15:15:59 crc kubenswrapper[4840]: I1205 15:15:59.051731 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-cttvn" event={"ID":"ab69b71e-d666-46a7-a896-96a70fff685a","Type":"ContainerStarted","Data":"81c61e74eaf432740a5ced7773903ece9155096817dee8be369f3814cc267add"} Dec 05 15:15:59 crc kubenswrapper[4840]: I1205 15:15:59.054813 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-rnf5z" event={"ID":"f75e819c-db65-4ab4-8530-1390b8a83dd0","Type":"ContainerStarted","Data":"5e03b2c9089a12a2b7c4bc22477d0853d148fd1f7735596f6c3040ff829fb50e"} Dec 05 15:15:59 crc kubenswrapper[4840]: I1205 15:15:59.058339 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-v7fw8" podStartSLOduration=9.967230229 podStartE2EDuration="24.058320443s" podCreationTimestamp="2025-12-05 15:15:35 +0000 UTC" firstStartedPulling="2025-12-05 15:15:43.181409439 +0000 UTC m=+1021.522472093" lastFinishedPulling="2025-12-05 15:15:57.272499693 +0000 UTC m=+1035.613562307" observedRunningTime="2025-12-05 15:15:59.051071757 +0000 UTC m=+1037.392134391" watchObservedRunningTime="2025-12-05 15:15:59.058320443 +0000 UTC m=+1037.399383067" Dec 05 15:15:59 crc kubenswrapper[4840]: I1205 15:15:59.062190 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"b1a586cf-ff08-4975-b172-0167bb10ff77","Type":"ContainerStarted","Data":"6506bdcf0cd5c0bb55cc950ad4f4290aca985d52724014cbaf7e1399f9f760d2"} Dec 05 15:15:59 crc kubenswrapper[4840]: I1205 15:15:59.064166 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-brnf4" Dec 05 15:15:59 crc kubenswrapper[4840]: I1205 15:15:59.064179 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-brnf4" event={"ID":"71997911-0710-4c53-b9ce-fe9673740c28","Type":"ContainerDied","Data":"9b12446948db0e300bbc46ca7f60d61d79fe129d9eab7729ea093bd74cae795d"} Dec 05 15:15:59 crc kubenswrapper[4840]: I1205 15:15:59.066146 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"7548669d-ea2b-4442-b4b6-f3408d636798","Type":"ContainerStarted","Data":"e41b293e8328ee370581ef0d93dff566c287e989593b6edcab534ca497f54842"} Dec 05 15:15:59 crc kubenswrapper[4840]: I1205 15:15:59.108908 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-d5ll6"] Dec 05 15:15:59 crc kubenswrapper[4840]: I1205 15:15:59.116562 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-d5ll6"] Dec 05 15:15:59 crc kubenswrapper[4840]: I1205 15:15:59.146641 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-brnf4"] Dec 05 15:15:59 crc kubenswrapper[4840]: I1205 15:15:59.154395 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-brnf4"] Dec 05 15:16:00 crc kubenswrapper[4840]: I1205 15:16:00.078026 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="299512bd-f667-4602-8ec2-307ecdcbabc4" path="/var/lib/kubelet/pods/299512bd-f667-4602-8ec2-307ecdcbabc4/volumes" Dec 05 15:16:00 crc kubenswrapper[4840]: I1205 15:16:00.079366 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71997911-0710-4c53-b9ce-fe9673740c28" path="/var/lib/kubelet/pods/71997911-0710-4c53-b9ce-fe9673740c28/volumes" Dec 05 15:16:01 crc kubenswrapper[4840]: I1205 15:16:01.084467 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"0fdc90f7-3261-4c8f-860b-c5f3890d3470","Type":"ContainerStarted","Data":"d1a0af6e1699c05d980d43692a3d39a4bb8ac5427fbfeea481f6ad02b486706c"} Dec 05 15:16:06 crc kubenswrapper[4840]: I1205 15:16:06.246588 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57d769cc4f-v7fw8" Dec 05 15:16:06 crc kubenswrapper[4840]: I1205 15:16:06.311901 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-btff4"] Dec 05 15:16:10 crc kubenswrapper[4840]: I1205 15:16:10.299286 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-btff4" event={"ID":"56638051-dc23-40f8-b1f3-01d17d4a87e9","Type":"ContainerStarted","Data":"d395a6914261b636480321de2187c5b4351f05177d71b3b9f45f4d80de02939f"} Dec 05 15:16:10 crc kubenswrapper[4840]: I1205 15:16:10.299931 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-btff4" Dec 05 15:16:10 crc kubenswrapper[4840]: I1205 15:16:10.299387 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-btff4" podUID="56638051-dc23-40f8-b1f3-01d17d4a87e9" containerName="dnsmasq-dns" containerID="cri-o://d395a6914261b636480321de2187c5b4351f05177d71b3b9f45f4d80de02939f" gracePeriod=10 Dec 05 15:16:10 crc kubenswrapper[4840]: I1205 15:16:10.301439 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" 
event={"ID":"0fdc90f7-3261-4c8f-860b-c5f3890d3470","Type":"ContainerStarted","Data":"e57adb7bdaf7de807a11d8916600fb7b941e9303ca162094714fcaea2dfbeb4f"} Dec 05 15:16:10 crc kubenswrapper[4840]: I1205 15:16:10.302763 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-rnf5z" event={"ID":"f75e819c-db65-4ab4-8530-1390b8a83dd0","Type":"ContainerStarted","Data":"ee3b4a7b7f835d2497353608149ab110f714bffb3bb45053889490976935fbb2"} Dec 05 15:16:10 crc kubenswrapper[4840]: I1205 15:16:10.304500 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"28fec705-baad-4e89-94d4-e1e7d64579a1","Type":"ContainerStarted","Data":"87ab3eb07a8a8c83840ea209479a58e65893947288b5df4acbbc2ef33a46cd80"} Dec 05 15:16:10 crc kubenswrapper[4840]: I1205 15:16:10.307190 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"b1a586cf-ff08-4975-b172-0167bb10ff77","Type":"ContainerStarted","Data":"07052e4c0597fce5e9d5a0bbbef2498eb2991b7beccb570e8d5e42ce83bb5b84"} Dec 05 15:16:10 crc kubenswrapper[4840]: I1205 15:16:10.308533 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"7548669d-ea2b-4442-b4b6-f3408d636798","Type":"ContainerStarted","Data":"13a0f285ccd11ef1b54c66e0b79a0785132c0b75f3176d3a272462bcb518cca2"} Dec 05 15:16:10 crc kubenswrapper[4840]: I1205 15:16:10.309628 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"bd532161-0cce-4e82-b084-45e1569c1575","Type":"ContainerStarted","Data":"0ca604581e78219c1588d30e53b4ef78163f2a250dc59f29015fe3fe9aaa2474"} Dec 05 15:16:10 crc kubenswrapper[4840]: I1205 15:16:10.309812 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 05 15:16:10 crc kubenswrapper[4840]: I1205 15:16:10.311258 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"2d91fd7f-f657-458d-9f80-3d915e5fa97d","Type":"ContainerStarted","Data":"262d6552c5cf1ccafbf2cfc4d4e36143bdd78653f40b2f09e584a26871ad3c96"} Dec 05 15:16:10 crc kubenswrapper[4840]: I1205 15:16:10.311676 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 05 15:16:10 crc kubenswrapper[4840]: I1205 15:16:10.313324 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-cttvn" event={"ID":"ab69b71e-d666-46a7-a896-96a70fff685a","Type":"ContainerStarted","Data":"08eed9457e76c1a1da3733f37dab671e82dcddbfcf631680d60a56432aa54608"} Dec 05 15:16:10 crc kubenswrapper[4840]: I1205 15:16:10.313477 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-cttvn" Dec 05 15:16:10 crc kubenswrapper[4840]: I1205 15:16:10.350755 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-btff4" podStartSLOduration=21.252562941 podStartE2EDuration="35.350727706s" podCreationTimestamp="2025-12-05 15:15:35 +0000 UTC" firstStartedPulling="2025-12-05 15:15:43.181308596 +0000 UTC m=+1021.522371210" lastFinishedPulling="2025-12-05 15:15:57.279473341 +0000 UTC m=+1035.620535975" observedRunningTime="2025-12-05 15:16:10.342828352 +0000 UTC m=+1048.683890966" watchObservedRunningTime="2025-12-05 15:16:10.350727706 +0000 UTC m=+1048.691790320" Dec 05 15:16:10 crc kubenswrapper[4840]: I1205 15:16:10.437129 4840 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/ovn-controller-cttvn" podStartSLOduration=14.339970449 podStartE2EDuration="25.437107522s" podCreationTimestamp="2025-12-05 15:15:45 +0000 UTC" firstStartedPulling="2025-12-05 15:15:58.194443561 +0000 UTC m=+1036.535506175" lastFinishedPulling="2025-12-05 15:16:09.291580634 +0000 UTC m=+1047.632643248" observedRunningTime="2025-12-05 15:16:10.417497207 +0000 UTC m=+1048.758559821" watchObservedRunningTime="2025-12-05 15:16:10.437107522 +0000 UTC m=+1048.778170136" Dec 05 15:16:10 crc kubenswrapper[4840]: I1205 15:16:10.459420 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=16.647481154 podStartE2EDuration="28.459389903s" podCreationTimestamp="2025-12-05 15:15:42 +0000 UTC" firstStartedPulling="2025-12-05 15:15:57.870658112 +0000 UTC m=+1036.211720726" lastFinishedPulling="2025-12-05 15:16:09.682566861 +0000 UTC m=+1048.023629475" observedRunningTime="2025-12-05 15:16:10.440839628 +0000 UTC m=+1048.781902242" watchObservedRunningTime="2025-12-05 15:16:10.459389903 +0000 UTC m=+1048.800452517" Dec 05 15:16:10 crc kubenswrapper[4840]: I1205 15:16:10.504798 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=20.252043026 podStartE2EDuration="30.504772538s" podCreationTimestamp="2025-12-05 15:15:40 +0000 UTC" firstStartedPulling="2025-12-05 15:15:57.148678747 +0000 UTC m=+1035.489741361" lastFinishedPulling="2025-12-05 15:16:07.401408259 +0000 UTC m=+1045.742470873" observedRunningTime="2025-12-05 15:16:10.504617084 +0000 UTC m=+1048.845679698" watchObservedRunningTime="2025-12-05 15:16:10.504772538 +0000 UTC m=+1048.845835152" Dec 05 15:16:11 crc kubenswrapper[4840]: I1205 15:16:11.321354 4840 generic.go:334] "Generic (PLEG): container finished" podID="f75e819c-db65-4ab4-8530-1390b8a83dd0" containerID="ee3b4a7b7f835d2497353608149ab110f714bffb3bb45053889490976935fbb2" exitCode=0 Dec 05 15:16:11 crc kubenswrapper[4840]: I1205 15:16:11.321415 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-rnf5z" event={"ID":"f75e819c-db65-4ab4-8530-1390b8a83dd0","Type":"ContainerDied","Data":"ee3b4a7b7f835d2497353608149ab110f714bffb3bb45053889490976935fbb2"} Dec 05 15:16:11 crc kubenswrapper[4840]: I1205 15:16:11.323929 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f169c577-448f-45db-bcdd-f34f5c24e6bb","Type":"ContainerStarted","Data":"a44027a9b622191feea57e10e032a53de96578f30e052b68af4d233fb01ec896"} Dec 05 15:16:11 crc kubenswrapper[4840]: I1205 15:16:11.327306 4840 generic.go:334] "Generic (PLEG): container finished" podID="56638051-dc23-40f8-b1f3-01d17d4a87e9" containerID="d395a6914261b636480321de2187c5b4351f05177d71b3b9f45f4d80de02939f" exitCode=0 Dec 05 15:16:11 crc kubenswrapper[4840]: I1205 15:16:11.327402 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-btff4" event={"ID":"56638051-dc23-40f8-b1f3-01d17d4a87e9","Type":"ContainerDied","Data":"d395a6914261b636480321de2187c5b4351f05177d71b3b9f45f4d80de02939f"} Dec 05 15:16:11 crc kubenswrapper[4840]: I1205 15:16:11.327472 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-btff4" event={"ID":"56638051-dc23-40f8-b1f3-01d17d4a87e9","Type":"ContainerDied","Data":"235772afffc1ea5413d0fdd02e837dca1073bc6dcd44398327b3c17d37ccf88d"} Dec 05 15:16:11 crc kubenswrapper[4840]: I1205 15:16:11.327493 4840 
pod_container_deletor.go:80] "Container not found in pod's containers" containerID="235772afffc1ea5413d0fdd02e837dca1073bc6dcd44398327b3c17d37ccf88d" Dec 05 15:16:11 crc kubenswrapper[4840]: I1205 15:16:11.351822 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-btff4" Dec 05 15:16:11 crc kubenswrapper[4840]: I1205 15:16:11.427673 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkx2x\" (UniqueName: \"kubernetes.io/projected/56638051-dc23-40f8-b1f3-01d17d4a87e9-kube-api-access-qkx2x\") pod \"56638051-dc23-40f8-b1f3-01d17d4a87e9\" (UID: \"56638051-dc23-40f8-b1f3-01d17d4a87e9\") " Dec 05 15:16:11 crc kubenswrapper[4840]: I1205 15:16:11.427738 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56638051-dc23-40f8-b1f3-01d17d4a87e9-dns-svc\") pod \"56638051-dc23-40f8-b1f3-01d17d4a87e9\" (UID: \"56638051-dc23-40f8-b1f3-01d17d4a87e9\") " Dec 05 15:16:11 crc kubenswrapper[4840]: I1205 15:16:11.427778 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56638051-dc23-40f8-b1f3-01d17d4a87e9-config\") pod \"56638051-dc23-40f8-b1f3-01d17d4a87e9\" (UID: \"56638051-dc23-40f8-b1f3-01d17d4a87e9\") " Dec 05 15:16:11 crc kubenswrapper[4840]: I1205 15:16:11.433732 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56638051-dc23-40f8-b1f3-01d17d4a87e9-kube-api-access-qkx2x" (OuterVolumeSpecName: "kube-api-access-qkx2x") pod "56638051-dc23-40f8-b1f3-01d17d4a87e9" (UID: "56638051-dc23-40f8-b1f3-01d17d4a87e9"). InnerVolumeSpecName "kube-api-access-qkx2x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:16:11 crc kubenswrapper[4840]: I1205 15:16:11.474584 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56638051-dc23-40f8-b1f3-01d17d4a87e9-config" (OuterVolumeSpecName: "config") pod "56638051-dc23-40f8-b1f3-01d17d4a87e9" (UID: "56638051-dc23-40f8-b1f3-01d17d4a87e9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:16:11 crc kubenswrapper[4840]: I1205 15:16:11.478797 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56638051-dc23-40f8-b1f3-01d17d4a87e9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "56638051-dc23-40f8-b1f3-01d17d4a87e9" (UID: "56638051-dc23-40f8-b1f3-01d17d4a87e9"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:16:11 crc kubenswrapper[4840]: I1205 15:16:11.530172 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkx2x\" (UniqueName: \"kubernetes.io/projected/56638051-dc23-40f8-b1f3-01d17d4a87e9-kube-api-access-qkx2x\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:11 crc kubenswrapper[4840]: I1205 15:16:11.530205 4840 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56638051-dc23-40f8-b1f3-01d17d4a87e9-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:11 crc kubenswrapper[4840]: I1205 15:16:11.530214 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56638051-dc23-40f8-b1f3-01d17d4a87e9-config\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:12 crc kubenswrapper[4840]: I1205 15:16:12.529625 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-rnf5z" event={"ID":"f75e819c-db65-4ab4-8530-1390b8a83dd0","Type":"ContainerStarted","Data":"6723d2425b6dafa8da5300522cfaabcd755d2bc0b349577819cee5d88a469567"} Dec 05 15:16:12 crc kubenswrapper[4840]: I1205 15:16:12.529974 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-rnf5z" Dec 05 15:16:12 crc kubenswrapper[4840]: I1205 15:16:12.529988 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-rnf5z" Dec 05 15:16:12 crc kubenswrapper[4840]: I1205 15:16:12.529996 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-rnf5z" event={"ID":"f75e819c-db65-4ab4-8530-1390b8a83dd0","Type":"ContainerStarted","Data":"1c0f1021351918c380c4bbc6395235ebf9a4ecf93c39d3d9ab3552dd50ede987"} Dec 05 15:16:12 crc kubenswrapper[4840]: I1205 15:16:12.533961 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e8fc49c1-0820-4dcb-9a50-9d3504b768d9","Type":"ContainerStarted","Data":"7d8de606e132e801bf3051d60b8122b938e918f5ababd4e11fc0130a6cfaf2ca"} Dec 05 15:16:12 crc kubenswrapper[4840]: I1205 15:16:12.534069 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-btff4" Dec 05 15:16:12 crc kubenswrapper[4840]: I1205 15:16:12.554538 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-rnf5z" podStartSLOduration=16.972866136 podStartE2EDuration="27.554516862s" podCreationTimestamp="2025-12-05 15:15:45 +0000 UTC" firstStartedPulling="2025-12-05 15:15:58.187659379 +0000 UTC m=+1036.528721993" lastFinishedPulling="2025-12-05 15:16:08.769310105 +0000 UTC m=+1047.110372719" observedRunningTime="2025-12-05 15:16:12.548848492 +0000 UTC m=+1050.889911106" watchObservedRunningTime="2025-12-05 15:16:12.554516862 +0000 UTC m=+1050.895579476" Dec 05 15:16:12 crc kubenswrapper[4840]: I1205 15:16:12.570914 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-btff4"] Dec 05 15:16:12 crc kubenswrapper[4840]: I1205 15:16:12.576959 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-btff4"] Dec 05 15:16:14 crc kubenswrapper[4840]: I1205 15:16:14.161570 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56638051-dc23-40f8-b1f3-01d17d4a87e9" path="/var/lib/kubelet/pods/56638051-dc23-40f8-b1f3-01d17d4a87e9/volumes" Dec 05 15:16:16 crc kubenswrapper[4840]: I1205 15:16:16.191170 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 05 15:16:16 crc kubenswrapper[4840]: I1205 15:16:16.667724 4840 generic.go:334] "Generic (PLEG): container finished" podID="28fec705-baad-4e89-94d4-e1e7d64579a1" containerID="87ab3eb07a8a8c83840ea209479a58e65893947288b5df4acbbc2ef33a46cd80" exitCode=0 Dec 05 15:16:16 crc kubenswrapper[4840]: I1205 15:16:16.667768 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"28fec705-baad-4e89-94d4-e1e7d64579a1","Type":"ContainerDied","Data":"87ab3eb07a8a8c83840ea209479a58e65893947288b5df4acbbc2ef33a46cd80"} Dec 05 15:16:17 crc kubenswrapper[4840]: I1205 15:16:17.675564 4840 generic.go:334] "Generic (PLEG): container finished" podID="b1a586cf-ff08-4975-b172-0167bb10ff77" containerID="07052e4c0597fce5e9d5a0bbbef2498eb2991b7beccb570e8d5e42ce83bb5b84" exitCode=0 Dec 05 15:16:17 crc kubenswrapper[4840]: I1205 15:16:17.675796 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"b1a586cf-ff08-4975-b172-0167bb10ff77","Type":"ContainerDied","Data":"07052e4c0597fce5e9d5a0bbbef2498eb2991b7beccb570e8d5e42ce83bb5b84"} Dec 05 15:16:22 crc kubenswrapper[4840]: I1205 15:16:22.682352 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 05 15:16:22 crc kubenswrapper[4840]: I1205 15:16:22.715977 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-v5hmk"] Dec 05 15:16:22 crc kubenswrapper[4840]: E1205 15:16:22.716478 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56638051-dc23-40f8-b1f3-01d17d4a87e9" containerName="init" Dec 05 15:16:22 crc kubenswrapper[4840]: I1205 15:16:22.716500 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="56638051-dc23-40f8-b1f3-01d17d4a87e9" containerName="init" Dec 05 15:16:22 crc kubenswrapper[4840]: E1205 15:16:22.716520 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56638051-dc23-40f8-b1f3-01d17d4a87e9" containerName="dnsmasq-dns" Dec 05 15:16:22 crc kubenswrapper[4840]: I1205 15:16:22.716534 4840 
state_mem.go:107] "Deleted CPUSet assignment" podUID="56638051-dc23-40f8-b1f3-01d17d4a87e9" containerName="dnsmasq-dns" Dec 05 15:16:22 crc kubenswrapper[4840]: I1205 15:16:22.716906 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="56638051-dc23-40f8-b1f3-01d17d4a87e9" containerName="dnsmasq-dns" Dec 05 15:16:22 crc kubenswrapper[4840]: I1205 15:16:22.717979 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-v5hmk" Dec 05 15:16:22 crc kubenswrapper[4840]: I1205 15:16:22.745964 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-v5hmk"] Dec 05 15:16:22 crc kubenswrapper[4840]: I1205 15:16:22.765015 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38350ce1-63f1-48d6-b83c-38e883b00db0-config\") pod \"dnsmasq-dns-7cb5889db5-v5hmk\" (UID: \"38350ce1-63f1-48d6-b83c-38e883b00db0\") " pod="openstack/dnsmasq-dns-7cb5889db5-v5hmk" Dec 05 15:16:22 crc kubenswrapper[4840]: I1205 15:16:22.765073 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38350ce1-63f1-48d6-b83c-38e883b00db0-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-v5hmk\" (UID: \"38350ce1-63f1-48d6-b83c-38e883b00db0\") " pod="openstack/dnsmasq-dns-7cb5889db5-v5hmk" Dec 05 15:16:22 crc kubenswrapper[4840]: I1205 15:16:22.765166 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgbb4\" (UniqueName: \"kubernetes.io/projected/38350ce1-63f1-48d6-b83c-38e883b00db0-kube-api-access-rgbb4\") pod \"dnsmasq-dns-7cb5889db5-v5hmk\" (UID: \"38350ce1-63f1-48d6-b83c-38e883b00db0\") " pod="openstack/dnsmasq-dns-7cb5889db5-v5hmk" Dec 05 15:16:22 crc kubenswrapper[4840]: I1205 15:16:22.866907 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38350ce1-63f1-48d6-b83c-38e883b00db0-config\") pod \"dnsmasq-dns-7cb5889db5-v5hmk\" (UID: \"38350ce1-63f1-48d6-b83c-38e883b00db0\") " pod="openstack/dnsmasq-dns-7cb5889db5-v5hmk" Dec 05 15:16:22 crc kubenswrapper[4840]: I1205 15:16:22.866974 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38350ce1-63f1-48d6-b83c-38e883b00db0-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-v5hmk\" (UID: \"38350ce1-63f1-48d6-b83c-38e883b00db0\") " pod="openstack/dnsmasq-dns-7cb5889db5-v5hmk" Dec 05 15:16:22 crc kubenswrapper[4840]: I1205 15:16:22.867066 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgbb4\" (UniqueName: \"kubernetes.io/projected/38350ce1-63f1-48d6-b83c-38e883b00db0-kube-api-access-rgbb4\") pod \"dnsmasq-dns-7cb5889db5-v5hmk\" (UID: \"38350ce1-63f1-48d6-b83c-38e883b00db0\") " pod="openstack/dnsmasq-dns-7cb5889db5-v5hmk" Dec 05 15:16:22 crc kubenswrapper[4840]: I1205 15:16:22.867915 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38350ce1-63f1-48d6-b83c-38e883b00db0-config\") pod \"dnsmasq-dns-7cb5889db5-v5hmk\" (UID: \"38350ce1-63f1-48d6-b83c-38e883b00db0\") " pod="openstack/dnsmasq-dns-7cb5889db5-v5hmk" Dec 05 15:16:22 crc kubenswrapper[4840]: I1205 15:16:22.867970 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/38350ce1-63f1-48d6-b83c-38e883b00db0-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-v5hmk\" (UID: \"38350ce1-63f1-48d6-b83c-38e883b00db0\") " pod="openstack/dnsmasq-dns-7cb5889db5-v5hmk" Dec 05 15:16:22 crc kubenswrapper[4840]: I1205 15:16:22.885686 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgbb4\" (UniqueName: \"kubernetes.io/projected/38350ce1-63f1-48d6-b83c-38e883b00db0-kube-api-access-rgbb4\") pod \"dnsmasq-dns-7cb5889db5-v5hmk\" (UID: \"38350ce1-63f1-48d6-b83c-38e883b00db0\") " pod="openstack/dnsmasq-dns-7cb5889db5-v5hmk" Dec 05 15:16:23 crc kubenswrapper[4840]: I1205 15:16:23.049262 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-v5hmk" Dec 05 15:16:23 crc kubenswrapper[4840]: I1205 15:16:23.892002 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 05 15:16:23 crc kubenswrapper[4840]: I1205 15:16:23.905304 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 05 15:16:23 crc kubenswrapper[4840]: I1205 15:16:23.908433 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 05 15:16:23 crc kubenswrapper[4840]: I1205 15:16:23.908497 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 05 15:16:23 crc kubenswrapper[4840]: I1205 15:16:23.908659 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 05 15:16:23 crc kubenswrapper[4840]: I1205 15:16:23.908839 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-s56bl" Dec 05 15:16:23 crc kubenswrapper[4840]: I1205 15:16:23.926137 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 05 15:16:24 crc kubenswrapper[4840]: I1205 15:16:24.082595 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpzqm\" (UniqueName: \"kubernetes.io/projected/49fa86fd-482b-426d-9ec6-2c963600851e-kube-api-access-hpzqm\") pod \"swift-storage-0\" (UID: \"49fa86fd-482b-426d-9ec6-2c963600851e\") " pod="openstack/swift-storage-0" Dec 05 15:16:24 crc kubenswrapper[4840]: I1205 15:16:24.082661 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/49fa86fd-482b-426d-9ec6-2c963600851e-lock\") pod \"swift-storage-0\" (UID: \"49fa86fd-482b-426d-9ec6-2c963600851e\") " pod="openstack/swift-storage-0" Dec 05 15:16:24 crc kubenswrapper[4840]: I1205 15:16:24.082697 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/49fa86fd-482b-426d-9ec6-2c963600851e-cache\") pod \"swift-storage-0\" (UID: \"49fa86fd-482b-426d-9ec6-2c963600851e\") " pod="openstack/swift-storage-0" Dec 05 15:16:24 crc kubenswrapper[4840]: I1205 15:16:24.082715 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"49fa86fd-482b-426d-9ec6-2c963600851e\") " pod="openstack/swift-storage-0" Dec 05 15:16:24 crc kubenswrapper[4840]: I1205 15:16:24.082738 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/49fa86fd-482b-426d-9ec6-2c963600851e-etc-swift\") pod \"swift-storage-0\" (UID: \"49fa86fd-482b-426d-9ec6-2c963600851e\") " pod="openstack/swift-storage-0" Dec 05 15:16:24 crc kubenswrapper[4840]: I1205 15:16:24.184233 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpzqm\" (UniqueName: \"kubernetes.io/projected/49fa86fd-482b-426d-9ec6-2c963600851e-kube-api-access-hpzqm\") pod \"swift-storage-0\" (UID: \"49fa86fd-482b-426d-9ec6-2c963600851e\") " pod="openstack/swift-storage-0" Dec 05 15:16:24 crc kubenswrapper[4840]: I1205 15:16:24.184611 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/49fa86fd-482b-426d-9ec6-2c963600851e-lock\") pod \"swift-storage-0\" (UID: \"49fa86fd-482b-426d-9ec6-2c963600851e\") " pod="openstack/swift-storage-0" Dec 05 15:16:24 crc kubenswrapper[4840]: I1205 15:16:24.184687 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/49fa86fd-482b-426d-9ec6-2c963600851e-cache\") pod \"swift-storage-0\" (UID: \"49fa86fd-482b-426d-9ec6-2c963600851e\") " pod="openstack/swift-storage-0" Dec 05 15:16:24 crc kubenswrapper[4840]: I1205 15:16:24.184729 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"49fa86fd-482b-426d-9ec6-2c963600851e\") " pod="openstack/swift-storage-0" Dec 05 15:16:24 crc kubenswrapper[4840]: I1205 15:16:24.184773 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/49fa86fd-482b-426d-9ec6-2c963600851e-etc-swift\") pod \"swift-storage-0\" (UID: \"49fa86fd-482b-426d-9ec6-2c963600851e\") " pod="openstack/swift-storage-0" Dec 05 15:16:24 crc kubenswrapper[4840]: I1205 15:16:24.186427 4840 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"49fa86fd-482b-426d-9ec6-2c963600851e\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/swift-storage-0" Dec 05 15:16:24 crc kubenswrapper[4840]: E1205 15:16:24.186713 4840 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 15:16:24 crc kubenswrapper[4840]: E1205 15:16:24.186795 4840 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 15:16:24 crc kubenswrapper[4840]: E1205 15:16:24.186954 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/49fa86fd-482b-426d-9ec6-2c963600851e-etc-swift podName:49fa86fd-482b-426d-9ec6-2c963600851e nodeName:}" failed. No retries permitted until 2025-12-05 15:16:24.686937582 +0000 UTC m=+1063.028000196 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/49fa86fd-482b-426d-9ec6-2c963600851e-etc-swift") pod "swift-storage-0" (UID: "49fa86fd-482b-426d-9ec6-2c963600851e") : configmap "swift-ring-files" not found Dec 05 15:16:24 crc kubenswrapper[4840]: I1205 15:16:24.187394 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/49fa86fd-482b-426d-9ec6-2c963600851e-lock\") pod \"swift-storage-0\" (UID: \"49fa86fd-482b-426d-9ec6-2c963600851e\") " pod="openstack/swift-storage-0" Dec 05 15:16:24 crc kubenswrapper[4840]: I1205 15:16:24.187629 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/49fa86fd-482b-426d-9ec6-2c963600851e-cache\") pod \"swift-storage-0\" (UID: \"49fa86fd-482b-426d-9ec6-2c963600851e\") " pod="openstack/swift-storage-0" Dec 05 15:16:24 crc kubenswrapper[4840]: I1205 15:16:24.214054 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpzqm\" (UniqueName: \"kubernetes.io/projected/49fa86fd-482b-426d-9ec6-2c963600851e-kube-api-access-hpzqm\") pod \"swift-storage-0\" (UID: \"49fa86fd-482b-426d-9ec6-2c963600851e\") " pod="openstack/swift-storage-0" Dec 05 15:16:24 crc kubenswrapper[4840]: I1205 15:16:24.214542 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"49fa86fd-482b-426d-9ec6-2c963600851e\") " pod="openstack/swift-storage-0" Dec 05 15:16:24 crc kubenswrapper[4840]: I1205 15:16:24.695013 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/49fa86fd-482b-426d-9ec6-2c963600851e-etc-swift\") pod \"swift-storage-0\" (UID: \"49fa86fd-482b-426d-9ec6-2c963600851e\") " pod="openstack/swift-storage-0" Dec 05 15:16:24 crc kubenswrapper[4840]: E1205 15:16:24.695191 4840 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 15:16:24 crc kubenswrapper[4840]: E1205 15:16:24.695220 4840 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 15:16:24 crc kubenswrapper[4840]: E1205 15:16:24.695305 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/49fa86fd-482b-426d-9ec6-2c963600851e-etc-swift podName:49fa86fd-482b-426d-9ec6-2c963600851e nodeName:}" failed. No retries permitted until 2025-12-05 15:16:25.695264997 +0000 UTC m=+1064.036327621 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/49fa86fd-482b-426d-9ec6-2c963600851e-etc-swift") pod "swift-storage-0" (UID: "49fa86fd-482b-426d-9ec6-2c963600851e") : configmap "swift-ring-files" not found Dec 05 15:16:25 crc kubenswrapper[4840]: I1205 15:16:25.710424 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/49fa86fd-482b-426d-9ec6-2c963600851e-etc-swift\") pod \"swift-storage-0\" (UID: \"49fa86fd-482b-426d-9ec6-2c963600851e\") " pod="openstack/swift-storage-0" Dec 05 15:16:25 crc kubenswrapper[4840]: E1205 15:16:25.710680 4840 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 15:16:25 crc kubenswrapper[4840]: E1205 15:16:25.710714 4840 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 15:16:25 crc kubenswrapper[4840]: E1205 15:16:25.710795 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/49fa86fd-482b-426d-9ec6-2c963600851e-etc-swift podName:49fa86fd-482b-426d-9ec6-2c963600851e nodeName:}" failed. No retries permitted until 2025-12-05 15:16:27.710769463 +0000 UTC m=+1066.051832087 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/49fa86fd-482b-426d-9ec6-2c963600851e-etc-swift") pod "swift-storage-0" (UID: "49fa86fd-482b-426d-9ec6-2c963600851e") : configmap "swift-ring-files" not found Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.740688 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/49fa86fd-482b-426d-9ec6-2c963600851e-etc-swift\") pod \"swift-storage-0\" (UID: \"49fa86fd-482b-426d-9ec6-2c963600851e\") " pod="openstack/swift-storage-0" Dec 05 15:16:27 crc kubenswrapper[4840]: E1205 15:16:27.740910 4840 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 15:16:27 crc kubenswrapper[4840]: E1205 15:16:27.741092 4840 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 15:16:27 crc kubenswrapper[4840]: E1205 15:16:27.741140 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/49fa86fd-482b-426d-9ec6-2c963600851e-etc-swift podName:49fa86fd-482b-426d-9ec6-2c963600851e nodeName:}" failed. No retries permitted until 2025-12-05 15:16:31.741123737 +0000 UTC m=+1070.082186351 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/49fa86fd-482b-426d-9ec6-2c963600851e-etc-swift") pod "swift-storage-0" (UID: "49fa86fd-482b-426d-9ec6-2c963600851e") : configmap "swift-ring-files" not found Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.805110 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-pwjzh"] Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.806376 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.808421 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.809257 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.811410 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.832057 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-pwjzh"] Dec 05 15:16:27 crc kubenswrapper[4840]: E1205 15:16:27.833019 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-xb2d4 ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/swift-ring-rebalance-pwjzh" podUID="aa5968f6-f47e-4d08-bb15-c80ed8d76a6d" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.840900 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-b696t"] Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.842730 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.843098 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-etc-swift\") pod \"swift-ring-rebalance-pwjzh\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.843295 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-ring-data-devices\") pod \"swift-ring-rebalance-pwjzh\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.843458 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xb2d4\" (UniqueName: \"kubernetes.io/projected/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-kube-api-access-xb2d4\") pod \"swift-ring-rebalance-pwjzh\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.843667 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-dispersionconf\") pod \"swift-ring-rebalance-pwjzh\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.843996 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-scripts\") pod \"swift-ring-rebalance-pwjzh\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 
15:16:27.844202 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-swiftconf\") pod \"swift-ring-rebalance-pwjzh\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.844428 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-combined-ca-bundle\") pod \"swift-ring-rebalance-pwjzh\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.852484 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-b696t"] Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.870621 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-pwjzh"] Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.946498 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45ba7f21-a1e8-4443-816f-91c5392f62df-combined-ca-bundle\") pod \"swift-ring-rebalance-b696t\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.946588 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bd5dm\" (UniqueName: \"kubernetes.io/projected/45ba7f21-a1e8-4443-816f-91c5392f62df-kube-api-access-bd5dm\") pod \"swift-ring-rebalance-b696t\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.946617 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-scripts\") pod \"swift-ring-rebalance-pwjzh\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.946633 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-swiftconf\") pod \"swift-ring-rebalance-pwjzh\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.946656 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-combined-ca-bundle\") pod \"swift-ring-rebalance-pwjzh\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.946704 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/45ba7f21-a1e8-4443-816f-91c5392f62df-etc-swift\") pod \"swift-ring-rebalance-b696t\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.946755 4840 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-etc-swift\") pod \"swift-ring-rebalance-pwjzh\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.946786 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/45ba7f21-a1e8-4443-816f-91c5392f62df-scripts\") pod \"swift-ring-rebalance-b696t\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.946813 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-ring-data-devices\") pod \"swift-ring-rebalance-pwjzh\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.946837 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xb2d4\" (UniqueName: \"kubernetes.io/projected/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-kube-api-access-xb2d4\") pod \"swift-ring-rebalance-pwjzh\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.946857 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/45ba7f21-a1e8-4443-816f-91c5392f62df-dispersionconf\") pod \"swift-ring-rebalance-b696t\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.946901 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-dispersionconf\") pod \"swift-ring-rebalance-pwjzh\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.946922 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/45ba7f21-a1e8-4443-816f-91c5392f62df-ring-data-devices\") pod \"swift-ring-rebalance-b696t\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.946949 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/45ba7f21-a1e8-4443-816f-91c5392f62df-swiftconf\") pod \"swift-ring-rebalance-b696t\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.947654 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-ring-data-devices\") pod \"swift-ring-rebalance-pwjzh\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.947857 4840 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-etc-swift\") pod \"swift-ring-rebalance-pwjzh\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.947915 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-scripts\") pod \"swift-ring-rebalance-pwjzh\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.952420 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-dispersionconf\") pod \"swift-ring-rebalance-pwjzh\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.952666 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-swiftconf\") pod \"swift-ring-rebalance-pwjzh\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.966581 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-combined-ca-bundle\") pod \"swift-ring-rebalance-pwjzh\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:27 crc kubenswrapper[4840]: I1205 15:16:27.967590 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xb2d4\" (UniqueName: \"kubernetes.io/projected/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-kube-api-access-xb2d4\") pod \"swift-ring-rebalance-pwjzh\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.047949 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/45ba7f21-a1e8-4443-816f-91c5392f62df-dispersionconf\") pod \"swift-ring-rebalance-b696t\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.048014 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/45ba7f21-a1e8-4443-816f-91c5392f62df-ring-data-devices\") pod \"swift-ring-rebalance-b696t\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.048038 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/45ba7f21-a1e8-4443-816f-91c5392f62df-swiftconf\") pod \"swift-ring-rebalance-b696t\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.048067 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45ba7f21-a1e8-4443-816f-91c5392f62df-combined-ca-bundle\") pod 
\"swift-ring-rebalance-b696t\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.048124 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bd5dm\" (UniqueName: \"kubernetes.io/projected/45ba7f21-a1e8-4443-816f-91c5392f62df-kube-api-access-bd5dm\") pod \"swift-ring-rebalance-b696t\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.048152 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/45ba7f21-a1e8-4443-816f-91c5392f62df-etc-swift\") pod \"swift-ring-rebalance-b696t\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.048235 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/45ba7f21-a1e8-4443-816f-91c5392f62df-scripts\") pod \"swift-ring-rebalance-b696t\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.049980 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/45ba7f21-a1e8-4443-816f-91c5392f62df-ring-data-devices\") pod \"swift-ring-rebalance-b696t\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.050040 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/45ba7f21-a1e8-4443-816f-91c5392f62df-scripts\") pod \"swift-ring-rebalance-b696t\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.050193 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/45ba7f21-a1e8-4443-816f-91c5392f62df-etc-swift\") pod \"swift-ring-rebalance-b696t\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.052855 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/45ba7f21-a1e8-4443-816f-91c5392f62df-swiftconf\") pod \"swift-ring-rebalance-b696t\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.054319 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/45ba7f21-a1e8-4443-816f-91c5392f62df-dispersionconf\") pod \"swift-ring-rebalance-b696t\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.058645 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45ba7f21-a1e8-4443-816f-91c5392f62df-combined-ca-bundle\") pod \"swift-ring-rebalance-b696t\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 
15:16:28.065992 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bd5dm\" (UniqueName: \"kubernetes.io/projected/45ba7f21-a1e8-4443-816f-91c5392f62df-kube-api-access-bd5dm\") pod \"swift-ring-rebalance-b696t\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.161827 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.763889 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:28 crc kubenswrapper[4840]: E1205 15:16:28.788119 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified" Dec 05 15:16:28 crc kubenswrapper[4840]: E1205 15:16:28.788382 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:openstack-network-exporter,Image:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,Command:[/app/openstack-network-exporter],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:OPENSTACK_NETWORK_EXPORTER_YAML,Value:/etc/config/openstack-network-exporter.yaml,ValueFrom:nil,},EnvVar{Name:CONFIG_HASH,Value:n5fdh676hc9h68fh696h55bh9dhc6h58bh89h577h64fh85h686h685h686h5ch67dh686h66hbh597hc8h68ch66dh56chcbh579h669h85h8h688q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:ovsdb-rundir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:metrics-certs-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovnmetrics.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:metrics-certs-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovnmetrics.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:metrics-certs-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xnh2x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovsdbserver-nb-0_openstack(0fdc90f7-3261-4c8f-860b-c5f3890d3470): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:16:28 crc kubenswrapper[4840]: 
E1205 15:16:28.789836 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstack-network-exporter\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovsdbserver-nb-0" podUID="0fdc90f7-3261-4c8f-860b-c5f3890d3470" Dec 05 15:16:28 crc kubenswrapper[4840]: E1205 15:16:28.799598 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified" Dec 05 15:16:28 crc kubenswrapper[4840]: E1205 15:16:28.799733 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:openstack-network-exporter,Image:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,Command:[/app/openstack-network-exporter],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:OPENSTACK_NETWORK_EXPORTER_YAML,Value:/etc/config/openstack-network-exporter.yaml,ValueFrom:nil,},EnvVar{Name:CONFIG_HASH,Value:n55ch678h567h67fh646h89h584h86hb8hb7h5cfh5d4h694h5bh58fh98h99h67ch554h96h55fhfdh8dh94h8fh685h9dh5c4h565h5c6h59fhb7q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:ovsdb-rundir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:metrics-certs-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovnmetrics.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:metrics-certs-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovnmetrics.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:metrics-certs-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ffp8z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovsdbserver-sb-0_openstack(7548669d-ea2b-4442-b4b6-f3408d636798): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:16:28 crc kubenswrapper[4840]: E1205 15:16:28.801443 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstack-network-exporter\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovsdbserver-sb-0" podUID="7548669d-ea2b-4442-b4b6-f3408d636798" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.824805 4840 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.864473 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-swiftconf\") pod \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.864510 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-ring-data-devices\") pod \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.864563 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-dispersionconf\") pod \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.864617 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-etc-swift\") pod \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.864648 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-scripts\") pod \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.864757 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xb2d4\" (UniqueName: \"kubernetes.io/projected/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-kube-api-access-xb2d4\") pod \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.864792 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-combined-ca-bundle\") pod \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\" (UID: \"aa5968f6-f47e-4d08-bb15-c80ed8d76a6d\") " Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.865378 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "aa5968f6-f47e-4d08-bb15-c80ed8d76a6d" (UID: "aa5968f6-f47e-4d08-bb15-c80ed8d76a6d"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.865653 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "aa5968f6-f47e-4d08-bb15-c80ed8d76a6d" (UID: "aa5968f6-f47e-4d08-bb15-c80ed8d76a6d"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.865840 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-scripts" (OuterVolumeSpecName: "scripts") pod "aa5968f6-f47e-4d08-bb15-c80ed8d76a6d" (UID: "aa5968f6-f47e-4d08-bb15-c80ed8d76a6d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.869960 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "aa5968f6-f47e-4d08-bb15-c80ed8d76a6d" (UID: "aa5968f6-f47e-4d08-bb15-c80ed8d76a6d"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.870916 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "aa5968f6-f47e-4d08-bb15-c80ed8d76a6d" (UID: "aa5968f6-f47e-4d08-bb15-c80ed8d76a6d"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.871347 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-kube-api-access-xb2d4" (OuterVolumeSpecName: "kube-api-access-xb2d4") pod "aa5968f6-f47e-4d08-bb15-c80ed8d76a6d" (UID: "aa5968f6-f47e-4d08-bb15-c80ed8d76a6d"). InnerVolumeSpecName "kube-api-access-xb2d4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.872074 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aa5968f6-f47e-4d08-bb15-c80ed8d76a6d" (UID: "aa5968f6-f47e-4d08-bb15-c80ed8d76a6d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.967139 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xb2d4\" (UniqueName: \"kubernetes.io/projected/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-kube-api-access-xb2d4\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.967176 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.967190 4840 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.967202 4840 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.967214 4840 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.967226 4840 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:28 crc kubenswrapper[4840]: I1205 15:16:28.967237 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:29 crc kubenswrapper[4840]: I1205 15:16:29.063531 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-b696t"] Dec 05 15:16:29 crc kubenswrapper[4840]: I1205 15:16:29.149531 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-v5hmk"] Dec 05 15:16:29 crc kubenswrapper[4840]: W1205 15:16:29.160469 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38350ce1_63f1_48d6_b83c_38e883b00db0.slice/crio-ec4582698d65e96cefea0374f1ea0352afeebc6b6742e49d084b3aff7331ed3e WatchSource:0}: Error finding container ec4582698d65e96cefea0374f1ea0352afeebc6b6742e49d084b3aff7331ed3e: Status 404 returned error can't find the container with id ec4582698d65e96cefea0374f1ea0352afeebc6b6742e49d084b3aff7331ed3e Dec 05 15:16:29 crc kubenswrapper[4840]: I1205 15:16:29.773129 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"28fec705-baad-4e89-94d4-e1e7d64579a1","Type":"ContainerStarted","Data":"58c6d152aa50473cebe71d666f51ff57f2b3a0c2161a9291e33b22a267138ffc"} Dec 05 15:16:29 crc kubenswrapper[4840]: I1205 15:16:29.776641 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"b1a586cf-ff08-4975-b172-0167bb10ff77","Type":"ContainerStarted","Data":"4467c2a935d3a75173b3c736b6ce2d84d913ba53a3b18c2228e6174fb3a50dec"} Dec 05 15:16:29 crc kubenswrapper[4840]: I1205 15:16:29.778276 4840 generic.go:334] "Generic (PLEG): container finished" 
podID="38350ce1-63f1-48d6-b83c-38e883b00db0" containerID="deed8b66add3e0f07a8477e6964b6de143961b6ad59d05fa7a83fb559d1d29f2" exitCode=0 Dec 05 15:16:29 crc kubenswrapper[4840]: I1205 15:16:29.778382 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-v5hmk" event={"ID":"38350ce1-63f1-48d6-b83c-38e883b00db0","Type":"ContainerDied","Data":"deed8b66add3e0f07a8477e6964b6de143961b6ad59d05fa7a83fb559d1d29f2"} Dec 05 15:16:29 crc kubenswrapper[4840]: I1205 15:16:29.778416 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-v5hmk" event={"ID":"38350ce1-63f1-48d6-b83c-38e883b00db0","Type":"ContainerStarted","Data":"ec4582698d65e96cefea0374f1ea0352afeebc6b6742e49d084b3aff7331ed3e"} Dec 05 15:16:29 crc kubenswrapper[4840]: I1205 15:16:29.789586 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-pwjzh" Dec 05 15:16:29 crc kubenswrapper[4840]: I1205 15:16:29.794645 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-b696t" event={"ID":"45ba7f21-a1e8-4443-816f-91c5392f62df","Type":"ContainerStarted","Data":"59dc59e94400bd39cb14459fb6fc4585bf66db4c84f9107b0a73d8f3317115bb"} Dec 05 15:16:29 crc kubenswrapper[4840]: E1205 15:16:29.796259 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstack-network-exporter\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified\\\"\"" pod="openstack/ovsdbserver-sb-0" podUID="7548669d-ea2b-4442-b4b6-f3408d636798" Dec 05 15:16:29 crc kubenswrapper[4840]: E1205 15:16:29.801740 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstack-network-exporter\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified\\\"\"" pod="openstack/ovsdbserver-nb-0" podUID="0fdc90f7-3261-4c8f-860b-c5f3890d3470" Dec 05 15:16:29 crc kubenswrapper[4840]: I1205 15:16:29.807541 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=40.973970805 podStartE2EDuration="51.807497223s" podCreationTimestamp="2025-12-05 15:15:38 +0000 UTC" firstStartedPulling="2025-12-05 15:15:57.62869534 +0000 UTC m=+1035.969757954" lastFinishedPulling="2025-12-05 15:16:08.462221758 +0000 UTC m=+1046.803284372" observedRunningTime="2025-12-05 15:16:29.800814774 +0000 UTC m=+1068.141877438" watchObservedRunningTime="2025-12-05 15:16:29.807497223 +0000 UTC m=+1068.148559857" Dec 05 15:16:29 crc kubenswrapper[4840]: I1205 15:16:29.834018 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=43.612867864 podStartE2EDuration="52.833994913s" podCreationTimestamp="2025-12-05 15:15:37 +0000 UTC" firstStartedPulling="2025-12-05 15:15:58.18030346 +0000 UTC m=+1036.521366074" lastFinishedPulling="2025-12-05 15:16:07.401430509 +0000 UTC m=+1045.742493123" observedRunningTime="2025-12-05 15:16:29.832608534 +0000 UTC m=+1068.173671188" watchObservedRunningTime="2025-12-05 15:16:29.833994913 +0000 UTC m=+1068.175057527" Dec 05 15:16:29 crc kubenswrapper[4840]: I1205 15:16:29.959958 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-pwjzh"] Dec 05 15:16:29 crc kubenswrapper[4840]: I1205 15:16:29.976187 4840 kubelet.go:2431] 
"SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-pwjzh"] Dec 05 15:16:30 crc kubenswrapper[4840]: I1205 15:16:30.092400 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa5968f6-f47e-4d08-bb15-c80ed8d76a6d" path="/var/lib/kubelet/pods/aa5968f6-f47e-4d08-bb15-c80ed8d76a6d/volumes" Dec 05 15:16:30 crc kubenswrapper[4840]: I1205 15:16:30.196206 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 05 15:16:30 crc kubenswrapper[4840]: I1205 15:16:30.235319 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 05 15:16:30 crc kubenswrapper[4840]: I1205 15:16:30.750638 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 05 15:16:30 crc kubenswrapper[4840]: I1205 15:16:30.750994 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 05 15:16:30 crc kubenswrapper[4840]: I1205 15:16:30.799217 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-v5hmk" event={"ID":"38350ce1-63f1-48d6-b83c-38e883b00db0","Type":"ContainerStarted","Data":"6d4ae6f4ab28ab0e2af13f2c80bd278ab42079100e205595b9bc9ca99e3540f4"} Dec 05 15:16:30 crc kubenswrapper[4840]: I1205 15:16:30.799647 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 05 15:16:30 crc kubenswrapper[4840]: E1205 15:16:30.801885 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstack-network-exporter\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified\\\"\"" pod="openstack/ovsdbserver-nb-0" podUID="0fdc90f7-3261-4c8f-860b-c5f3890d3470" Dec 05 15:16:30 crc kubenswrapper[4840]: I1205 15:16:30.820831 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7cb5889db5-v5hmk" podStartSLOduration=8.820810527999999 podStartE2EDuration="8.820810528s" podCreationTimestamp="2025-12-05 15:16:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:16:30.81948203 +0000 UTC m=+1069.160544644" watchObservedRunningTime="2025-12-05 15:16:30.820810528 +0000 UTC m=+1069.161873142" Dec 05 15:16:30 crc kubenswrapper[4840]: I1205 15:16:30.855698 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.282781 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-kbwmc"] Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.289770 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-kbwmc" Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.294551 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-kbwmc"] Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.299499 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.335540 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npd9s\" (UniqueName: \"kubernetes.io/projected/3548faf3-ee23-449d-b44c-5858d2cdc9ec-kube-api-access-npd9s\") pod \"ovn-controller-metrics-kbwmc\" (UID: \"3548faf3-ee23-449d-b44c-5858d2cdc9ec\") " pod="openstack/ovn-controller-metrics-kbwmc" Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.335603 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/3548faf3-ee23-449d-b44c-5858d2cdc9ec-ovn-rundir\") pod \"ovn-controller-metrics-kbwmc\" (UID: \"3548faf3-ee23-449d-b44c-5858d2cdc9ec\") " pod="openstack/ovn-controller-metrics-kbwmc" Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.335641 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3548faf3-ee23-449d-b44c-5858d2cdc9ec-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-kbwmc\" (UID: \"3548faf3-ee23-449d-b44c-5858d2cdc9ec\") " pod="openstack/ovn-controller-metrics-kbwmc" Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.335686 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3548faf3-ee23-449d-b44c-5858d2cdc9ec-config\") pod \"ovn-controller-metrics-kbwmc\" (UID: \"3548faf3-ee23-449d-b44c-5858d2cdc9ec\") " pod="openstack/ovn-controller-metrics-kbwmc" Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.335713 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3548faf3-ee23-449d-b44c-5858d2cdc9ec-combined-ca-bundle\") pod \"ovn-controller-metrics-kbwmc\" (UID: \"3548faf3-ee23-449d-b44c-5858d2cdc9ec\") " pod="openstack/ovn-controller-metrics-kbwmc" Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.335780 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/3548faf3-ee23-449d-b44c-5858d2cdc9ec-ovs-rundir\") pod \"ovn-controller-metrics-kbwmc\" (UID: \"3548faf3-ee23-449d-b44c-5858d2cdc9ec\") " pod="openstack/ovn-controller-metrics-kbwmc" Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.437346 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npd9s\" (UniqueName: \"kubernetes.io/projected/3548faf3-ee23-449d-b44c-5858d2cdc9ec-kube-api-access-npd9s\") pod \"ovn-controller-metrics-kbwmc\" (UID: \"3548faf3-ee23-449d-b44c-5858d2cdc9ec\") " pod="openstack/ovn-controller-metrics-kbwmc" Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.437469 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/3548faf3-ee23-449d-b44c-5858d2cdc9ec-ovn-rundir\") pod \"ovn-controller-metrics-kbwmc\" (UID: 
\"3548faf3-ee23-449d-b44c-5858d2cdc9ec\") " pod="openstack/ovn-controller-metrics-kbwmc" Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.437516 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3548faf3-ee23-449d-b44c-5858d2cdc9ec-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-kbwmc\" (UID: \"3548faf3-ee23-449d-b44c-5858d2cdc9ec\") " pod="openstack/ovn-controller-metrics-kbwmc" Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.437567 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3548faf3-ee23-449d-b44c-5858d2cdc9ec-config\") pod \"ovn-controller-metrics-kbwmc\" (UID: \"3548faf3-ee23-449d-b44c-5858d2cdc9ec\") " pod="openstack/ovn-controller-metrics-kbwmc" Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.437600 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3548faf3-ee23-449d-b44c-5858d2cdc9ec-combined-ca-bundle\") pod \"ovn-controller-metrics-kbwmc\" (UID: \"3548faf3-ee23-449d-b44c-5858d2cdc9ec\") " pod="openstack/ovn-controller-metrics-kbwmc" Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.437669 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/3548faf3-ee23-449d-b44c-5858d2cdc9ec-ovs-rundir\") pod \"ovn-controller-metrics-kbwmc\" (UID: \"3548faf3-ee23-449d-b44c-5858d2cdc9ec\") " pod="openstack/ovn-controller-metrics-kbwmc" Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.438129 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/3548faf3-ee23-449d-b44c-5858d2cdc9ec-ovs-rundir\") pod \"ovn-controller-metrics-kbwmc\" (UID: \"3548faf3-ee23-449d-b44c-5858d2cdc9ec\") " pod="openstack/ovn-controller-metrics-kbwmc" Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.438567 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/3548faf3-ee23-449d-b44c-5858d2cdc9ec-ovn-rundir\") pod \"ovn-controller-metrics-kbwmc\" (UID: \"3548faf3-ee23-449d-b44c-5858d2cdc9ec\") " pod="openstack/ovn-controller-metrics-kbwmc" Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.455195 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3548faf3-ee23-449d-b44c-5858d2cdc9ec-config\") pod \"ovn-controller-metrics-kbwmc\" (UID: \"3548faf3-ee23-449d-b44c-5858d2cdc9ec\") " pod="openstack/ovn-controller-metrics-kbwmc" Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.473410 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3548faf3-ee23-449d-b44c-5858d2cdc9ec-combined-ca-bundle\") pod \"ovn-controller-metrics-kbwmc\" (UID: \"3548faf3-ee23-449d-b44c-5858d2cdc9ec\") " pod="openstack/ovn-controller-metrics-kbwmc" Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.475520 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3548faf3-ee23-449d-b44c-5858d2cdc9ec-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-kbwmc\" (UID: \"3548faf3-ee23-449d-b44c-5858d2cdc9ec\") " pod="openstack/ovn-controller-metrics-kbwmc" Dec 05 15:16:31 crc 
kubenswrapper[4840]: I1205 15:16:31.499420 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npd9s\" (UniqueName: \"kubernetes.io/projected/3548faf3-ee23-449d-b44c-5858d2cdc9ec-kube-api-access-npd9s\") pod \"ovn-controller-metrics-kbwmc\" (UID: \"3548faf3-ee23-449d-b44c-5858d2cdc9ec\") " pod="openstack/ovn-controller-metrics-kbwmc"
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.558307 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-v5hmk"]
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.587778 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-9xbhk"]
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.589125 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d65f699f-9xbhk"
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.591511 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.616989 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-kbwmc"
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.728183 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-9xbhk"]
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.744466 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/955aa671-f552-4d4a-b656-daab37e833bd-dns-svc\") pod \"dnsmasq-dns-57d65f699f-9xbhk\" (UID: \"955aa671-f552-4d4a-b656-daab37e833bd\") " pod="openstack/dnsmasq-dns-57d65f699f-9xbhk"
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.744532 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/955aa671-f552-4d4a-b656-daab37e833bd-ovsdbserver-nb\") pod \"dnsmasq-dns-57d65f699f-9xbhk\" (UID: \"955aa671-f552-4d4a-b656-daab37e833bd\") " pod="openstack/dnsmasq-dns-57d65f699f-9xbhk"
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.744553 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/955aa671-f552-4d4a-b656-daab37e833bd-config\") pod \"dnsmasq-dns-57d65f699f-9xbhk\" (UID: \"955aa671-f552-4d4a-b656-daab37e833bd\") " pod="openstack/dnsmasq-dns-57d65f699f-9xbhk"
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.744595 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/49fa86fd-482b-426d-9ec6-2c963600851e-etc-swift\") pod \"swift-storage-0\" (UID: \"49fa86fd-482b-426d-9ec6-2c963600851e\") " pod="openstack/swift-storage-0"
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.744622 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qj4hr\" (UniqueName: \"kubernetes.io/projected/955aa671-f552-4d4a-b656-daab37e833bd-kube-api-access-qj4hr\") pod \"dnsmasq-dns-57d65f699f-9xbhk\" (UID: \"955aa671-f552-4d4a-b656-daab37e833bd\") " pod="openstack/dnsmasq-dns-57d65f699f-9xbhk"
Dec 05 15:16:31 crc kubenswrapper[4840]: E1205 15:16:31.744783 4840 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Dec 05 15:16:31 crc kubenswrapper[4840]: E1205 15:16:31.744797 4840 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Dec 05 15:16:31 crc kubenswrapper[4840]: E1205 15:16:31.744829 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/49fa86fd-482b-426d-9ec6-2c963600851e-etc-swift podName:49fa86fd-482b-426d-9ec6-2c963600851e nodeName:}" failed. No retries permitted until 2025-12-05 15:16:39.744817163 +0000 UTC m=+1078.085879777 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/49fa86fd-482b-426d-9ec6-2c963600851e-etc-swift") pod "swift-storage-0" (UID: "49fa86fd-482b-426d-9ec6-2c963600851e") : configmap "swift-ring-files" not found
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.748467 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0"
Dec 05 15:16:31 crc kubenswrapper[4840]: E1205 15:16:31.750358 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstack-network-exporter\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified\\\"\"" pod="openstack/ovsdbserver-sb-0" podUID="7548669d-ea2b-4442-b4b6-f3408d636798"
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.845923 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/955aa671-f552-4d4a-b656-daab37e833bd-dns-svc\") pod \"dnsmasq-dns-57d65f699f-9xbhk\" (UID: \"955aa671-f552-4d4a-b656-daab37e833bd\") " pod="openstack/dnsmasq-dns-57d65f699f-9xbhk"
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.846005 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/955aa671-f552-4d4a-b656-daab37e833bd-ovsdbserver-nb\") pod \"dnsmasq-dns-57d65f699f-9xbhk\" (UID: \"955aa671-f552-4d4a-b656-daab37e833bd\") " pod="openstack/dnsmasq-dns-57d65f699f-9xbhk"
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.846025 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/955aa671-f552-4d4a-b656-daab37e833bd-config\") pod \"dnsmasq-dns-57d65f699f-9xbhk\" (UID: \"955aa671-f552-4d4a-b656-daab37e833bd\") " pod="openstack/dnsmasq-dns-57d65f699f-9xbhk"
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.846080 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qj4hr\" (UniqueName: \"kubernetes.io/projected/955aa671-f552-4d4a-b656-daab37e833bd-kube-api-access-qj4hr\") pod \"dnsmasq-dns-57d65f699f-9xbhk\" (UID: \"955aa671-f552-4d4a-b656-daab37e833bd\") " pod="openstack/dnsmasq-dns-57d65f699f-9xbhk"
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.847041 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/955aa671-f552-4d4a-b656-daab37e833bd-dns-svc\") pod \"dnsmasq-dns-57d65f699f-9xbhk\" (UID: \"955aa671-f552-4d4a-b656-daab37e833bd\") " pod="openstack/dnsmasq-dns-57d65f699f-9xbhk"
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.847343 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/955aa671-f552-4d4a-b656-daab37e833bd-config\") pod \"dnsmasq-dns-57d65f699f-9xbhk\" (UID: \"955aa671-f552-4d4a-b656-daab37e833bd\") " pod="openstack/dnsmasq-dns-57d65f699f-9xbhk"
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.847456 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/955aa671-f552-4d4a-b656-daab37e833bd-ovsdbserver-nb\") pod \"dnsmasq-dns-57d65f699f-9xbhk\" (UID: \"955aa671-f552-4d4a-b656-daab37e833bd\") " pod="openstack/dnsmasq-dns-57d65f699f-9xbhk"
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.881666 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qj4hr\" (UniqueName: \"kubernetes.io/projected/955aa671-f552-4d4a-b656-daab37e833bd-kube-api-access-qj4hr\") pod \"dnsmasq-dns-57d65f699f-9xbhk\" (UID: \"955aa671-f552-4d4a-b656-daab37e833bd\") " pod="openstack/dnsmasq-dns-57d65f699f-9xbhk"
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.882076 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7cb5889db5-v5hmk"
Dec 05 15:16:31 crc kubenswrapper[4840]: E1205 15:16:31.889268 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstack-network-exporter\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified\\\"\"" pod="openstack/ovsdbserver-nb-0" podUID="0fdc90f7-3261-4c8f-860b-c5f3890d3470"
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.894104 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0"
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.894425 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0"
Dec 05 15:16:31 crc kubenswrapper[4840]: E1205 15:16:31.895913 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstack-network-exporter\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified\\\"\"" pod="openstack/ovsdbserver-sb-0" podUID="7548669d-ea2b-4442-b4b6-f3408d636798"
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.937587 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d65f699f-9xbhk"
Dec 05 15:16:31 crc kubenswrapper[4840]: I1205 15:16:31.962031 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0"
Dec 05 15:16:32 crc kubenswrapper[4840]: I1205 15:16:32.573557 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-9xbhk"]
Dec 05 15:16:32 crc kubenswrapper[4840]: I1205 15:16:32.615080 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-ccff5"]
Dec 05 15:16:32 crc kubenswrapper[4840]: I1205 15:16:32.616926 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-ccff5"
Dec 05 15:16:32 crc kubenswrapper[4840]: I1205 15:16:32.622244 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb"
Dec 05 15:16:32 crc kubenswrapper[4840]: I1205 15:16:32.629744 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-ccff5"]
Dec 05 15:16:32 crc kubenswrapper[4840]: I1205 15:16:32.756729 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5vwv\" (UniqueName: \"kubernetes.io/projected/376013cc-6ae2-4f36-adf2-8aa481b4789f-kube-api-access-w5vwv\") pod \"dnsmasq-dns-b8fbc5445-ccff5\" (UID: \"376013cc-6ae2-4f36-adf2-8aa481b4789f\") " pod="openstack/dnsmasq-dns-b8fbc5445-ccff5"
Dec 05 15:16:32 crc kubenswrapper[4840]: I1205 15:16:32.756784 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/376013cc-6ae2-4f36-adf2-8aa481b4789f-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-ccff5\" (UID: \"376013cc-6ae2-4f36-adf2-8aa481b4789f\") " pod="openstack/dnsmasq-dns-b8fbc5445-ccff5"
Dec 05 15:16:32 crc kubenswrapper[4840]: I1205 15:16:32.756810 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/376013cc-6ae2-4f36-adf2-8aa481b4789f-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-ccff5\" (UID: \"376013cc-6ae2-4f36-adf2-8aa481b4789f\") " pod="openstack/dnsmasq-dns-b8fbc5445-ccff5"
Dec 05 15:16:32 crc kubenswrapper[4840]: I1205 15:16:32.756834 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/376013cc-6ae2-4f36-adf2-8aa481b4789f-config\") pod \"dnsmasq-dns-b8fbc5445-ccff5\" (UID: \"376013cc-6ae2-4f36-adf2-8aa481b4789f\") " pod="openstack/dnsmasq-dns-b8fbc5445-ccff5"
Dec 05 15:16:32 crc kubenswrapper[4840]: I1205 15:16:32.756905 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/376013cc-6ae2-4f36-adf2-8aa481b4789f-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-ccff5\" (UID: \"376013cc-6ae2-4f36-adf2-8aa481b4789f\") " pod="openstack/dnsmasq-dns-b8fbc5445-ccff5"
Dec 05 15:16:32 crc kubenswrapper[4840]: I1205 15:16:32.858102 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/376013cc-6ae2-4f36-adf2-8aa481b4789f-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-ccff5\" (UID: \"376013cc-6ae2-4f36-adf2-8aa481b4789f\") " pod="openstack/dnsmasq-dns-b8fbc5445-ccff5"
Dec 05 15:16:32 crc kubenswrapper[4840]: I1205 15:16:32.858236 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5vwv\" (UniqueName: \"kubernetes.io/projected/376013cc-6ae2-4f36-adf2-8aa481b4789f-kube-api-access-w5vwv\") pod \"dnsmasq-dns-b8fbc5445-ccff5\" (UID: \"376013cc-6ae2-4f36-adf2-8aa481b4789f\") " pod="openstack/dnsmasq-dns-b8fbc5445-ccff5"
Dec 05 15:16:32 crc kubenswrapper[4840]: I1205 15:16:32.858261 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/376013cc-6ae2-4f36-adf2-8aa481b4789f-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-ccff5\" (UID: \"376013cc-6ae2-4f36-adf2-8aa481b4789f\") " pod="openstack/dnsmasq-dns-b8fbc5445-ccff5"
Dec 05 15:16:32 crc kubenswrapper[4840]: I1205 15:16:32.858276 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/376013cc-6ae2-4f36-adf2-8aa481b4789f-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-ccff5\" (UID: \"376013cc-6ae2-4f36-adf2-8aa481b4789f\") " pod="openstack/dnsmasq-dns-b8fbc5445-ccff5"
Dec 05 15:16:32 crc kubenswrapper[4840]: I1205 15:16:32.858297 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/376013cc-6ae2-4f36-adf2-8aa481b4789f-config\") pod \"dnsmasq-dns-b8fbc5445-ccff5\" (UID: \"376013cc-6ae2-4f36-adf2-8aa481b4789f\") " pod="openstack/dnsmasq-dns-b8fbc5445-ccff5"
Dec 05 15:16:32 crc kubenswrapper[4840]: I1205 15:16:32.859160 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/376013cc-6ae2-4f36-adf2-8aa481b4789f-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-ccff5\" (UID: \"376013cc-6ae2-4f36-adf2-8aa481b4789f\") " pod="openstack/dnsmasq-dns-b8fbc5445-ccff5"
Dec 05 15:16:32 crc kubenswrapper[4840]: I1205 15:16:32.859405 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/376013cc-6ae2-4f36-adf2-8aa481b4789f-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-ccff5\" (UID: \"376013cc-6ae2-4f36-adf2-8aa481b4789f\") " pod="openstack/dnsmasq-dns-b8fbc5445-ccff5"
Dec 05 15:16:32 crc kubenswrapper[4840]: I1205 15:16:32.860692 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/376013cc-6ae2-4f36-adf2-8aa481b4789f-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-ccff5\" (UID: \"376013cc-6ae2-4f36-adf2-8aa481b4789f\") " pod="openstack/dnsmasq-dns-b8fbc5445-ccff5"
Dec 05 15:16:32 crc kubenswrapper[4840]: I1205 15:16:32.862542 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/376013cc-6ae2-4f36-adf2-8aa481b4789f-config\") pod \"dnsmasq-dns-b8fbc5445-ccff5\" (UID: \"376013cc-6ae2-4f36-adf2-8aa481b4789f\") " pod="openstack/dnsmasq-dns-b8fbc5445-ccff5"
Dec 05 15:16:32 crc kubenswrapper[4840]: I1205 15:16:32.883426 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5vwv\" (UniqueName: \"kubernetes.io/projected/376013cc-6ae2-4f36-adf2-8aa481b4789f-kube-api-access-w5vwv\") pod \"dnsmasq-dns-b8fbc5445-ccff5\" (UID: \"376013cc-6ae2-4f36-adf2-8aa481b4789f\") " pod="openstack/dnsmasq-dns-b8fbc5445-ccff5"
Dec 05 15:16:32 crc kubenswrapper[4840]: I1205 15:16:32.889506 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7cb5889db5-v5hmk" podUID="38350ce1-63f1-48d6-b83c-38e883b00db0" containerName="dnsmasq-dns" containerID="cri-o://6d4ae6f4ab28ab0e2af13f2c80bd278ab42079100e205595b9bc9ca99e3540f4" gracePeriod=10
Dec 05 15:16:32 crc kubenswrapper[4840]: E1205 15:16:32.890341 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstack-network-exporter\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified\\\"\"" pod="openstack/ovsdbserver-sb-0" podUID="7548669d-ea2b-4442-b4b6-f3408d636798"
Dec 05 15:16:32 crc kubenswrapper[4840]: E1205 15:16:32.890455 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstack-network-exporter\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified\\\"\"" pod="openstack/ovsdbserver-nb-0" podUID="0fdc90f7-3261-4c8f-860b-c5f3890d3470"
Dec 05 15:16:32 crc kubenswrapper[4840]: I1205 15:16:32.947260 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-ccff5"
Dec 05 15:16:33 crc kubenswrapper[4840]: I1205 15:16:33.904831 4840 generic.go:334] "Generic (PLEG): container finished" podID="38350ce1-63f1-48d6-b83c-38e883b00db0" containerID="6d4ae6f4ab28ab0e2af13f2c80bd278ab42079100e205595b9bc9ca99e3540f4" exitCode=0
Dec 05 15:16:33 crc kubenswrapper[4840]: I1205 15:16:33.904930 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-v5hmk" event={"ID":"38350ce1-63f1-48d6-b83c-38e883b00db0","Type":"ContainerDied","Data":"6d4ae6f4ab28ab0e2af13f2c80bd278ab42079100e205595b9bc9ca99e3540f4"}
Dec 05 15:16:33 crc kubenswrapper[4840]: E1205 15:16:33.907395 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstack-network-exporter\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified\\\"\"" pod="openstack/ovsdbserver-sb-0" podUID="7548669d-ea2b-4442-b4b6-f3408d636798"
Dec 05 15:16:34 crc kubenswrapper[4840]: I1205 15:16:34.644604 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-v5hmk"
Dec 05 15:16:34 crc kubenswrapper[4840]: W1205 15:16:34.738410 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3548faf3_ee23_449d_b44c_5858d2cdc9ec.slice/crio-628b52a6e87a67b9a7da1446161c6090a76a5fb69abf6a8dcb68b8c520a8dd57 WatchSource:0}: Error finding container 628b52a6e87a67b9a7da1446161c6090a76a5fb69abf6a8dcb68b8c520a8dd57: Status 404 returned error can't find the container with id 628b52a6e87a67b9a7da1446161c6090a76a5fb69abf6a8dcb68b8c520a8dd57
Dec 05 15:16:34 crc kubenswrapper[4840]: I1205 15:16:34.739581 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-9xbhk"]
Dec 05 15:16:34 crc kubenswrapper[4840]: I1205 15:16:34.770461 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-kbwmc"]
Dec 05 15:16:34 crc kubenswrapper[4840]: I1205 15:16:34.798156 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38350ce1-63f1-48d6-b83c-38e883b00db0-dns-svc\") pod \"38350ce1-63f1-48d6-b83c-38e883b00db0\" (UID: \"38350ce1-63f1-48d6-b83c-38e883b00db0\") "
Dec 05 15:16:34 crc kubenswrapper[4840]: I1205 15:16:34.798237 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgbb4\" (UniqueName: \"kubernetes.io/projected/38350ce1-63f1-48d6-b83c-38e883b00db0-kube-api-access-rgbb4\") pod \"38350ce1-63f1-48d6-b83c-38e883b00db0\" (UID: \"38350ce1-63f1-48d6-b83c-38e883b00db0\") "
Dec 05 15:16:34 crc kubenswrapper[4840]: I1205 15:16:34.799222 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38350ce1-63f1-48d6-b83c-38e883b00db0-config\") pod \"38350ce1-63f1-48d6-b83c-38e883b00db0\" (UID: \"38350ce1-63f1-48d6-b83c-38e883b00db0\") "
Dec 05 15:16:34 crc kubenswrapper[4840]: I1205 15:16:34.812191 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38350ce1-63f1-48d6-b83c-38e883b00db0-kube-api-access-rgbb4" (OuterVolumeSpecName: "kube-api-access-rgbb4") pod "38350ce1-63f1-48d6-b83c-38e883b00db0" (UID: "38350ce1-63f1-48d6-b83c-38e883b00db0"). InnerVolumeSpecName "kube-api-access-rgbb4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 15:16:34 crc kubenswrapper[4840]: I1205 15:16:34.877352 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38350ce1-63f1-48d6-b83c-38e883b00db0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "38350ce1-63f1-48d6-b83c-38e883b00db0" (UID: "38350ce1-63f1-48d6-b83c-38e883b00db0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 15:16:34 crc kubenswrapper[4840]: I1205 15:16:34.886754 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38350ce1-63f1-48d6-b83c-38e883b00db0-config" (OuterVolumeSpecName: "config") pod "38350ce1-63f1-48d6-b83c-38e883b00db0" (UID: "38350ce1-63f1-48d6-b83c-38e883b00db0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 15:16:34 crc kubenswrapper[4840]: I1205 15:16:34.911189 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38350ce1-63f1-48d6-b83c-38e883b00db0-config\") on node \"crc\" DevicePath \"\""
Dec 05 15:16:34 crc kubenswrapper[4840]: I1205 15:16:34.911212 4840 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38350ce1-63f1-48d6-b83c-38e883b00db0-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 15:16:34 crc kubenswrapper[4840]: I1205 15:16:34.911222 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgbb4\" (UniqueName: \"kubernetes.io/projected/38350ce1-63f1-48d6-b83c-38e883b00db0-kube-api-access-rgbb4\") on node \"crc\" DevicePath \"\""
Dec 05 15:16:34 crc kubenswrapper[4840]: I1205 15:16:34.912619 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-kbwmc" event={"ID":"3548faf3-ee23-449d-b44c-5858d2cdc9ec","Type":"ContainerStarted","Data":"628b52a6e87a67b9a7da1446161c6090a76a5fb69abf6a8dcb68b8c520a8dd57"}
Dec 05 15:16:34 crc kubenswrapper[4840]: I1205 15:16:34.915574 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-v5hmk"
Dec 05 15:16:34 crc kubenswrapper[4840]: I1205 15:16:34.916460 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-v5hmk" event={"ID":"38350ce1-63f1-48d6-b83c-38e883b00db0","Type":"ContainerDied","Data":"ec4582698d65e96cefea0374f1ea0352afeebc6b6742e49d084b3aff7331ed3e"}
Dec 05 15:16:34 crc kubenswrapper[4840]: I1205 15:16:34.916523 4840 scope.go:117] "RemoveContainer" containerID="6d4ae6f4ab28ab0e2af13f2c80bd278ab42079100e205595b9bc9ca99e3540f4"
Dec 05 15:16:34 crc kubenswrapper[4840]: I1205 15:16:34.920120 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d65f699f-9xbhk" event={"ID":"955aa671-f552-4d4a-b656-daab37e833bd","Type":"ContainerStarted","Data":"1005e5d601dc7075082b64dd7cbdf2ca9b3aacea0bcb2c38fe993a57575b3676"}
Dec 05 15:16:34 crc kubenswrapper[4840]: I1205 15:16:34.921643 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-b696t" event={"ID":"45ba7f21-a1e8-4443-816f-91c5392f62df","Type":"ContainerStarted","Data":"93e8fc0e81aecd99c6a195df3c2e37e976d2b9b8207854165bc3de52f5e219d4"}
Dec 05 15:16:34 crc kubenswrapper[4840]: I1205 15:16:34.939918 4840 scope.go:117] "RemoveContainer" containerID="deed8b66add3e0f07a8477e6964b6de143961b6ad59d05fa7a83fb559d1d29f2"
Dec 05 15:16:34 crc kubenswrapper[4840]: I1205 15:16:34.943742 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-b696t" podStartSLOduration=2.8049212690000003 podStartE2EDuration="7.943719747s" podCreationTimestamp="2025-12-05 15:16:27 +0000 UTC" firstStartedPulling="2025-12-05 15:16:29.07322334 +0000 UTC m=+1067.414285954" lastFinishedPulling="2025-12-05 15:16:34.212021818 +0000 UTC m=+1072.553084432" observedRunningTime="2025-12-05 15:16:34.938153569 +0000 UTC m=+1073.279216183" watchObservedRunningTime="2025-12-05 15:16:34.943719747 +0000 UTC m=+1073.284782361"
Dec 05 15:16:34 crc kubenswrapper[4840]: I1205 15:16:34.958886 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-v5hmk"]
Dec 05 15:16:34 crc kubenswrapper[4840]: I1205 15:16:34.966252 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-v5hmk"]
Dec 05 15:16:35 crc kubenswrapper[4840]: I1205 15:16:35.082757 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-ccff5"]
Dec 05 15:16:35 crc kubenswrapper[4840]: W1205 15:16:35.094779 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod376013cc_6ae2_4f36_adf2_8aa481b4789f.slice/crio-458cc426991ee34b2b94c65835b8765f45bbe1d85aa8560b3d4153f99c739e69 WatchSource:0}: Error finding container 458cc426991ee34b2b94c65835b8765f45bbe1d85aa8560b3d4153f99c739e69: Status 404 returned error can't find the container with id 458cc426991ee34b2b94c65835b8765f45bbe1d85aa8560b3d4153f99c739e69
Dec 05 15:16:35 crc kubenswrapper[4840]: I1205 15:16:35.933518 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-kbwmc" event={"ID":"3548faf3-ee23-449d-b44c-5858d2cdc9ec","Type":"ContainerStarted","Data":"481f4810171ca785c3c22bf8d1d8699886d92b1b6b0b41e2136c1c473419ded4"}
Dec 05 15:16:35 crc kubenswrapper[4840]: I1205 15:16:35.937835 4840 generic.go:334] "Generic (PLEG): container finished" podID="376013cc-6ae2-4f36-adf2-8aa481b4789f" containerID="4d041a905acc7c052bc51380ecff9a15d22058aa5c28356f2aed639188cd6158" exitCode=0
Dec 05 15:16:35 crc kubenswrapper[4840]: I1205 15:16:35.937917 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-ccff5" event={"ID":"376013cc-6ae2-4f36-adf2-8aa481b4789f","Type":"ContainerDied","Data":"4d041a905acc7c052bc51380ecff9a15d22058aa5c28356f2aed639188cd6158"}
Dec 05 15:16:35 crc kubenswrapper[4840]: I1205 15:16:35.937942 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-ccff5" event={"ID":"376013cc-6ae2-4f36-adf2-8aa481b4789f","Type":"ContainerStarted","Data":"458cc426991ee34b2b94c65835b8765f45bbe1d85aa8560b3d4153f99c739e69"}
Dec 05 15:16:35 crc kubenswrapper[4840]: I1205 15:16:35.943930 4840 generic.go:334] "Generic (PLEG): container finished" podID="955aa671-f552-4d4a-b656-daab37e833bd" containerID="d3f0306a969b11c5db0cb9bf7da303136d93d0a00cd5b84a2e57a25632d79047" exitCode=0
Dec 05 15:16:35 crc kubenswrapper[4840]: I1205 15:16:35.944019 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d65f699f-9xbhk" event={"ID":"955aa671-f552-4d4a-b656-daab37e833bd","Type":"ContainerDied","Data":"d3f0306a969b11c5db0cb9bf7da303136d93d0a00cd5b84a2e57a25632d79047"}
Dec 05 15:16:35 crc kubenswrapper[4840]: I1205 15:16:35.967380 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-kbwmc" podStartSLOduration=4.519158172 podStartE2EDuration="4.967357394s" podCreationTimestamp="2025-12-05 15:16:31 +0000 UTC" firstStartedPulling="2025-12-05 15:16:34.740201814 +0000 UTC m=+1073.081264418" lastFinishedPulling="2025-12-05 15:16:35.188401026 +0000 UTC m=+1073.529463640" observedRunningTime="2025-12-05 15:16:35.957086323 +0000 UTC m=+1074.298148977" watchObservedRunningTime="2025-12-05 15:16:35.967357394 +0000 UTC m=+1074.308420018"
Dec 05 15:16:36 crc kubenswrapper[4840]: I1205 15:16:36.087046 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38350ce1-63f1-48d6-b83c-38e883b00db0" path="/var/lib/kubelet/pods/38350ce1-63f1-48d6-b83c-38e883b00db0/volumes"
Dec 05 15:16:36 crc kubenswrapper[4840]: I1205 15:16:36.396742 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d65f699f-9xbhk"
Dec 05 15:16:36 crc kubenswrapper[4840]: I1205 15:16:36.535937 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/955aa671-f552-4d4a-b656-daab37e833bd-config\") pod \"955aa671-f552-4d4a-b656-daab37e833bd\" (UID: \"955aa671-f552-4d4a-b656-daab37e833bd\") "
Dec 05 15:16:36 crc kubenswrapper[4840]: I1205 15:16:36.536020 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/955aa671-f552-4d4a-b656-daab37e833bd-ovsdbserver-nb\") pod \"955aa671-f552-4d4a-b656-daab37e833bd\" (UID: \"955aa671-f552-4d4a-b656-daab37e833bd\") "
Dec 05 15:16:36 crc kubenswrapper[4840]: I1205 15:16:36.536061 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/955aa671-f552-4d4a-b656-daab37e833bd-dns-svc\") pod \"955aa671-f552-4d4a-b656-daab37e833bd\" (UID: \"955aa671-f552-4d4a-b656-daab37e833bd\") "
Dec 05 15:16:36 crc kubenswrapper[4840]: I1205 15:16:36.536091 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qj4hr\" (UniqueName: \"kubernetes.io/projected/955aa671-f552-4d4a-b656-daab37e833bd-kube-api-access-qj4hr\") pod \"955aa671-f552-4d4a-b656-daab37e833bd\" (UID: \"955aa671-f552-4d4a-b656-daab37e833bd\") "
Dec 05 15:16:36 crc kubenswrapper[4840]: I1205 15:16:36.540942 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/955aa671-f552-4d4a-b656-daab37e833bd-kube-api-access-qj4hr" (OuterVolumeSpecName: "kube-api-access-qj4hr") pod "955aa671-f552-4d4a-b656-daab37e833bd" (UID: "955aa671-f552-4d4a-b656-daab37e833bd"). InnerVolumeSpecName "kube-api-access-qj4hr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 15:16:36 crc kubenswrapper[4840]: I1205 15:16:36.559382 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/955aa671-f552-4d4a-b656-daab37e833bd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "955aa671-f552-4d4a-b656-daab37e833bd" (UID: "955aa671-f552-4d4a-b656-daab37e833bd"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 15:16:36 crc kubenswrapper[4840]: E1205 15:16:36.562035 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/955aa671-f552-4d4a-b656-daab37e833bd-dns-svc podName:955aa671-f552-4d4a-b656-daab37e833bd nodeName:}" failed. No retries permitted until 2025-12-05 15:16:37.062001863 +0000 UTC m=+1075.403064477 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "dns-svc" (UniqueName: "kubernetes.io/configmap/955aa671-f552-4d4a-b656-daab37e833bd-dns-svc") pod "955aa671-f552-4d4a-b656-daab37e833bd" (UID: "955aa671-f552-4d4a-b656-daab37e833bd") : error deleting /var/lib/kubelet/pods/955aa671-f552-4d4a-b656-daab37e833bd/volume-subpaths: remove /var/lib/kubelet/pods/955aa671-f552-4d4a-b656-daab37e833bd/volume-subpaths: no such file or directory
Dec 05 15:16:36 crc kubenswrapper[4840]: I1205 15:16:36.562275 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/955aa671-f552-4d4a-b656-daab37e833bd-config" (OuterVolumeSpecName: "config") pod "955aa671-f552-4d4a-b656-daab37e833bd" (UID: "955aa671-f552-4d4a-b656-daab37e833bd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 15:16:36 crc kubenswrapper[4840]: I1205 15:16:36.637311 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/955aa671-f552-4d4a-b656-daab37e833bd-config\") on node \"crc\" DevicePath \"\""
Dec 05 15:16:36 crc kubenswrapper[4840]: I1205 15:16:36.637348 4840 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/955aa671-f552-4d4a-b656-daab37e833bd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 05 15:16:36 crc kubenswrapper[4840]: I1205 15:16:36.637361 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qj4hr\" (UniqueName: \"kubernetes.io/projected/955aa671-f552-4d4a-b656-daab37e833bd-kube-api-access-qj4hr\") on node \"crc\" DevicePath \"\""
Dec 05 15:16:36 crc kubenswrapper[4840]: I1205 15:16:36.852526 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0"
Dec 05 15:16:36 crc kubenswrapper[4840]: I1205 15:16:36.936353 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0"
Dec 05 15:16:36 crc kubenswrapper[4840]: I1205 15:16:36.952644 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-ccff5" event={"ID":"376013cc-6ae2-4f36-adf2-8aa481b4789f","Type":"ContainerStarted","Data":"0a3e6f3200f151a74a5f6dedf9b48a7b0f81d62ca7d8f4a6165dbfa27d38c4a7"}
Dec 05 15:16:36 crc kubenswrapper[4840]: I1205 15:16:36.952775 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-ccff5"
Dec 05 15:16:36 crc kubenswrapper[4840]: I1205 15:16:36.955089 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d65f699f-9xbhk" event={"ID":"955aa671-f552-4d4a-b656-daab37e833bd","Type":"ContainerDied","Data":"1005e5d601dc7075082b64dd7cbdf2ca9b3aacea0bcb2c38fe993a57575b3676"}
Dec 05 15:16:36 crc kubenswrapper[4840]: I1205 15:16:36.955131 4840 scope.go:117] "RemoveContainer" containerID="d3f0306a969b11c5db0cb9bf7da303136d93d0a00cd5b84a2e57a25632d79047"
Dec 05 15:16:36 crc kubenswrapper[4840]: I1205 15:16:36.955135 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d65f699f-9xbhk"
Dec 05 15:16:37 crc kubenswrapper[4840]: I1205 15:16:37.020642 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8fbc5445-ccff5" podStartSLOduration=5.02062392 podStartE2EDuration="5.02062392s" podCreationTimestamp="2025-12-05 15:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:16:37.018178751 +0000 UTC m=+1075.359241375" watchObservedRunningTime="2025-12-05 15:16:37.02062392 +0000 UTC m=+1075.361686534"
Dec 05 15:16:37 crc kubenswrapper[4840]: I1205 15:16:37.143189 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/955aa671-f552-4d4a-b656-daab37e833bd-dns-svc\") pod \"955aa671-f552-4d4a-b656-daab37e833bd\" (UID: \"955aa671-f552-4d4a-b656-daab37e833bd\") "
Dec 05 15:16:37 crc kubenswrapper[4840]: I1205 15:16:37.143581 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/955aa671-f552-4d4a-b656-daab37e833bd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "955aa671-f552-4d4a-b656-daab37e833bd" (UID: "955aa671-f552-4d4a-b656-daab37e833bd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 15:16:37 crc kubenswrapper[4840]: I1205 15:16:37.144298 4840 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/955aa671-f552-4d4a-b656-daab37e833bd-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 15:16:37 crc kubenswrapper[4840]: I1205 15:16:37.362906 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-9xbhk"]
Dec 05 15:16:37 crc kubenswrapper[4840]: I1205 15:16:37.376535 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-9xbhk"]
Dec 05 15:16:38 crc kubenswrapper[4840]: I1205 15:16:38.086594 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="955aa671-f552-4d4a-b656-daab37e833bd" path="/var/lib/kubelet/pods/955aa671-f552-4d4a-b656-daab37e833bd/volumes"
Dec 05 15:16:39 crc kubenswrapper[4840]: I1205 15:16:39.018308 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0"
Dec 05 15:16:39 crc kubenswrapper[4840]: I1205 15:16:39.018367 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0"
Dec 05 15:16:39 crc kubenswrapper[4840]: I1205 15:16:39.087877 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0"
Dec 05 15:16:39 crc kubenswrapper[4840]: I1205 15:16:39.804806 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/49fa86fd-482b-426d-9ec6-2c963600851e-etc-swift\") pod \"swift-storage-0\" (UID: \"49fa86fd-482b-426d-9ec6-2c963600851e\") " pod="openstack/swift-storage-0"
Dec 05 15:16:39 crc kubenswrapper[4840]: E1205 15:16:39.804984 4840 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Dec 05 15:16:39 crc kubenswrapper[4840]: E1205 15:16:39.805365 4840 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Dec 05 15:16:39 crc kubenswrapper[4840]: E1205 15:16:39.805420 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/49fa86fd-482b-426d-9ec6-2c963600851e-etc-swift podName:49fa86fd-482b-426d-9ec6-2c963600851e nodeName:}" failed. No retries permitted until 2025-12-05 15:16:55.805404678 +0000 UTC m=+1094.146467292 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/49fa86fd-482b-426d-9ec6-2c963600851e-etc-swift") pod "swift-storage-0" (UID: "49fa86fd-482b-426d-9ec6-2c963600851e") : configmap "swift-ring-files" not found
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.064651 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0"
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.717688 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-aff3-account-create-update-lh6jh"]
Dec 05 15:16:40 crc kubenswrapper[4840]: E1205 15:16:40.718092 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38350ce1-63f1-48d6-b83c-38e883b00db0" containerName="dnsmasq-dns"
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.718108 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="38350ce1-63f1-48d6-b83c-38e883b00db0" containerName="dnsmasq-dns"
Dec 05 15:16:40 crc kubenswrapper[4840]: E1205 15:16:40.718144 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38350ce1-63f1-48d6-b83c-38e883b00db0" containerName="init"
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.718152 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="38350ce1-63f1-48d6-b83c-38e883b00db0" containerName="init"
Dec 05 15:16:40 crc kubenswrapper[4840]: E1205 15:16:40.718168 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="955aa671-f552-4d4a-b656-daab37e833bd" containerName="init"
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.718177 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="955aa671-f552-4d4a-b656-daab37e833bd" containerName="init"
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.718357 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="955aa671-f552-4d4a-b656-daab37e833bd" containerName="init"
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.718389 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="38350ce1-63f1-48d6-b83c-38e883b00db0" containerName="dnsmasq-dns"
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.718994 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-aff3-account-create-update-lh6jh"
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.721361 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.748432 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-aff3-account-create-update-lh6jh"]
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.802698 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-5x7zs"]
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.804427 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-5x7zs"
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.810442 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-5x7zs"]
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.825911 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-762q5\" (UniqueName: \"kubernetes.io/projected/e4390a73-4bb5-42f6-a4dc-516f9657db60-kube-api-access-762q5\") pod \"keystone-aff3-account-create-update-lh6jh\" (UID: \"e4390a73-4bb5-42f6-a4dc-516f9657db60\") " pod="openstack/keystone-aff3-account-create-update-lh6jh"
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.825965 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb1bdb27-1669-4d51-b689-4a48bd5f1567-operator-scripts\") pod \"keystone-db-create-5x7zs\" (UID: \"eb1bdb27-1669-4d51-b689-4a48bd5f1567\") " pod="openstack/keystone-db-create-5x7zs"
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.825982 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mq7j6\" (UniqueName: \"kubernetes.io/projected/eb1bdb27-1669-4d51-b689-4a48bd5f1567-kube-api-access-mq7j6\") pod \"keystone-db-create-5x7zs\" (UID: \"eb1bdb27-1669-4d51-b689-4a48bd5f1567\") " pod="openstack/keystone-db-create-5x7zs"
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.826084 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4390a73-4bb5-42f6-a4dc-516f9657db60-operator-scripts\") pod \"keystone-aff3-account-create-update-lh6jh\" (UID: \"e4390a73-4bb5-42f6-a4dc-516f9657db60\") " pod="openstack/keystone-aff3-account-create-update-lh6jh"
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.927245 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4390a73-4bb5-42f6-a4dc-516f9657db60-operator-scripts\") pod \"keystone-aff3-account-create-update-lh6jh\" (UID: \"e4390a73-4bb5-42f6-a4dc-516f9657db60\") " pod="openstack/keystone-aff3-account-create-update-lh6jh"
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.927341 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-762q5\" (UniqueName: \"kubernetes.io/projected/e4390a73-4bb5-42f6-a4dc-516f9657db60-kube-api-access-762q5\") pod \"keystone-aff3-account-create-update-lh6jh\" (UID: \"e4390a73-4bb5-42f6-a4dc-516f9657db60\") " pod="openstack/keystone-aff3-account-create-update-lh6jh"
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.927392 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb1bdb27-1669-4d51-b689-4a48bd5f1567-operator-scripts\") pod \"keystone-db-create-5x7zs\" (UID: \"eb1bdb27-1669-4d51-b689-4a48bd5f1567\") " pod="openstack/keystone-db-create-5x7zs"
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.927414 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mq7j6\" (UniqueName: \"kubernetes.io/projected/eb1bdb27-1669-4d51-b689-4a48bd5f1567-kube-api-access-mq7j6\") pod \"keystone-db-create-5x7zs\" (UID: \"eb1bdb27-1669-4d51-b689-4a48bd5f1567\") " pod="openstack/keystone-db-create-5x7zs"
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.928298 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb1bdb27-1669-4d51-b689-4a48bd5f1567-operator-scripts\") pod \"keystone-db-create-5x7zs\" (UID: \"eb1bdb27-1669-4d51-b689-4a48bd5f1567\") " pod="openstack/keystone-db-create-5x7zs"
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.929431 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4390a73-4bb5-42f6-a4dc-516f9657db60-operator-scripts\") pod \"keystone-aff3-account-create-update-lh6jh\" (UID: \"e4390a73-4bb5-42f6-a4dc-516f9657db60\") " pod="openstack/keystone-aff3-account-create-update-lh6jh"
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.948318 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-762q5\" (UniqueName: \"kubernetes.io/projected/e4390a73-4bb5-42f6-a4dc-516f9657db60-kube-api-access-762q5\") pod \"keystone-aff3-account-create-update-lh6jh\" (UID: \"e4390a73-4bb5-42f6-a4dc-516f9657db60\") " pod="openstack/keystone-aff3-account-create-update-lh6jh"
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.949305 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mq7j6\" (UniqueName: \"kubernetes.io/projected/eb1bdb27-1669-4d51-b689-4a48bd5f1567-kube-api-access-mq7j6\") pod \"keystone-db-create-5x7zs\" (UID: \"eb1bdb27-1669-4d51-b689-4a48bd5f1567\") " pod="openstack/keystone-db-create-5x7zs"
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.990750 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-r67qk"]
Dec 05 15:16:40 crc kubenswrapper[4840]: I1205 15:16:40.992837 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-r67qk"
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.000673 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-r67qk"]
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.035630 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-251a-account-create-update-mcv4t"]
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.036811 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-251a-account-create-update-mcv4t"
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.039846 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret"
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.051395 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-251a-account-create-update-mcv4t"]
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.069275 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-aff3-account-create-update-lh6jh"
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.125283 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-5x7zs"
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.131859 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/41783d81-4e56-4e4e-9335-7f978fb478c6-operator-scripts\") pod \"placement-db-create-r67qk\" (UID: \"41783d81-4e56-4e4e-9335-7f978fb478c6\") " pod="openstack/placement-db-create-r67qk"
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.132076 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlpgg\" (UniqueName: \"kubernetes.io/projected/41783d81-4e56-4e4e-9335-7f978fb478c6-kube-api-access-tlpgg\") pod \"placement-db-create-r67qk\" (UID: \"41783d81-4e56-4e4e-9335-7f978fb478c6\") " pod="openstack/placement-db-create-r67qk"
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.233191 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qf7w\" (UniqueName: \"kubernetes.io/projected/c2f3441f-bb15-467f-b962-5c8b66b27cfe-kube-api-access-4qf7w\") pod \"placement-251a-account-create-update-mcv4t\" (UID: \"c2f3441f-bb15-467f-b962-5c8b66b27cfe\") " pod="openstack/placement-251a-account-create-update-mcv4t"
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.233573 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlpgg\" (UniqueName: \"kubernetes.io/projected/41783d81-4e56-4e4e-9335-7f978fb478c6-kube-api-access-tlpgg\") pod \"placement-db-create-r67qk\" (UID: \"41783d81-4e56-4e4e-9335-7f978fb478c6\") " pod="openstack/placement-db-create-r67qk"
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.233688 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/41783d81-4e56-4e4e-9335-7f978fb478c6-operator-scripts\") pod \"placement-db-create-r67qk\" (UID: \"41783d81-4e56-4e4e-9335-7f978fb478c6\") " pod="openstack/placement-db-create-r67qk"
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.233841 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2f3441f-bb15-467f-b962-5c8b66b27cfe-operator-scripts\") pod \"placement-251a-account-create-update-mcv4t\" (UID: \"c2f3441f-bb15-467f-b962-5c8b66b27cfe\") " pod="openstack/placement-251a-account-create-update-mcv4t"
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.235426 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/41783d81-4e56-4e4e-9335-7f978fb478c6-operator-scripts\") pod \"placement-db-create-r67qk\" (UID: \"41783d81-4e56-4e4e-9335-7f978fb478c6\") " pod="openstack/placement-db-create-r67qk"
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.252728 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlpgg\" (UniqueName: \"kubernetes.io/projected/41783d81-4e56-4e4e-9335-7f978fb478c6-kube-api-access-tlpgg\") pod \"placement-db-create-r67qk\" (UID: \"41783d81-4e56-4e4e-9335-7f978fb478c6\") " pod="openstack/placement-db-create-r67qk"
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.335705 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2f3441f-bb15-467f-b962-5c8b66b27cfe-operator-scripts\") pod \"placement-251a-account-create-update-mcv4t\" (UID: \"c2f3441f-bb15-467f-b962-5c8b66b27cfe\") " pod="openstack/placement-251a-account-create-update-mcv4t"
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.335755 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qf7w\" (UniqueName: \"kubernetes.io/projected/c2f3441f-bb15-467f-b962-5c8b66b27cfe-kube-api-access-4qf7w\") pod \"placement-251a-account-create-update-mcv4t\" (UID: \"c2f3441f-bb15-467f-b962-5c8b66b27cfe\") " pod="openstack/placement-251a-account-create-update-mcv4t"
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.336893 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2f3441f-bb15-467f-b962-5c8b66b27cfe-operator-scripts\") pod \"placement-251a-account-create-update-mcv4t\" (UID: \"c2f3441f-bb15-467f-b962-5c8b66b27cfe\") " pod="openstack/placement-251a-account-create-update-mcv4t"
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.346075 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-r67qk"
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.352177 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qf7w\" (UniqueName: \"kubernetes.io/projected/c2f3441f-bb15-467f-b962-5c8b66b27cfe-kube-api-access-4qf7w\") pod \"placement-251a-account-create-update-mcv4t\" (UID: \"c2f3441f-bb15-467f-b962-5c8b66b27cfe\") " pod="openstack/placement-251a-account-create-update-mcv4t"
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.358076 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-251a-account-create-update-mcv4t"
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.417351 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-cttvn" podUID="ab69b71e-d666-46a7-a896-96a70fff685a" containerName="ovn-controller" probeResult="failure" output=<
Dec 05 15:16:41 crc kubenswrapper[4840]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Dec 05 15:16:41 crc kubenswrapper[4840]: >
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.443130 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-rnf5z"
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.450343 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-rnf5z"
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.523775 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-aff3-account-create-update-lh6jh"]
Dec 05 15:16:41 crc kubenswrapper[4840]: W1205 15:16:41.577081 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4390a73_4bb5_42f6_a4dc_516f9657db60.slice/crio-221630d1f2d19e98981cee7732ed47b171981f4b034bf36de57b7d4313c94eb2 WatchSource:0}: Error finding container 221630d1f2d19e98981cee7732ed47b171981f4b034bf36de57b7d4313c94eb2: Status 404 returned error can't find the container with id 221630d1f2d19e98981cee7732ed47b171981f4b034bf36de57b7d4313c94eb2
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.656789 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-5x7zs"]
Dec 05 15:16:41 crc kubenswrapper[4840]: W1205 15:16:41.751145 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2f3441f_bb15_467f_b962_5c8b66b27cfe.slice/crio-07ffc270e722ee10a938d8c069cedbea6ae84e403a5972d5bf0befc844bfcf2d WatchSource:0}: Error finding container 07ffc270e722ee10a938d8c069cedbea6ae84e403a5972d5bf0befc844bfcf2d: Status 404 returned error can't find the container with id 07ffc270e722ee10a938d8c069cedbea6ae84e403a5972d5bf0befc844bfcf2d
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.754983 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-251a-account-create-update-mcv4t"]
Dec 05 15:16:41 crc kubenswrapper[4840]: I1205 15:16:41.820072 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-r67qk"]
Dec 05 15:16:41 crc kubenswrapper[4840]: W1205 15:16:41.827473 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod41783d81_4e56_4e4e_9335_7f978fb478c6.slice/crio-e742ff4b5e6653eff56df12c0fd6c3ca47fb3b828c30bf0896aa9039e791f24e WatchSource:0}: Error finding container e742ff4b5e6653eff56df12c0fd6c3ca47fb3b828c30bf0896aa9039e791f24e: Status 404 returned error can't find the container with id e742ff4b5e6653eff56df12c0fd6c3ca47fb3b828c30bf0896aa9039e791f24e
Dec 05 15:16:42 crc kubenswrapper[4840]: I1205 15:16:42.002302 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-251a-account-create-update-mcv4t" event={"ID":"c2f3441f-bb15-467f-b962-5c8b66b27cfe","Type":"ContainerStarted","Data":"f2f41802d6f41dd2f191c6987238c7da7df6a159bf374fa300d59f7d07121acd"}
Dec 05 15:16:42 crc kubenswrapper[4840]: I1205 15:16:42.002370 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-251a-account-create-update-mcv4t" event={"ID":"c2f3441f-bb15-467f-b962-5c8b66b27cfe","Type":"ContainerStarted","Data":"07ffc270e722ee10a938d8c069cedbea6ae84e403a5972d5bf0befc844bfcf2d"}
Dec 05 15:16:42 crc kubenswrapper[4840]: I1205 15:16:42.003732 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-5x7zs" event={"ID":"eb1bdb27-1669-4d51-b689-4a48bd5f1567","Type":"ContainerStarted","Data":"f53a3fd1a8a3f346988b00fc0abb1c8bcf0851620c8f5dd94cb0067bc2baf7b7"}
Dec 05 15:16:42 crc kubenswrapper[4840]: I1205 15:16:42.003768 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-5x7zs" event={"ID":"eb1bdb27-1669-4d51-b689-4a48bd5f1567","Type":"ContainerStarted","Data":"f8e581a671f5cfef42a32e400c49cce7adec9e1e2c037be6908588d773ec704d"}
Dec 05 15:16:42 crc kubenswrapper[4840]: I1205 15:16:42.005194 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-r67qk" event={"ID":"41783d81-4e56-4e4e-9335-7f978fb478c6","Type":"ContainerStarted","Data":"b9ccaa47bbdee3a059eb000267ae680b212947bfda89c55bbeb303e6b8627db0"}
Dec 05 15:16:42 crc kubenswrapper[4840]: I1205 15:16:42.005261 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-r67qk" event={"ID":"41783d81-4e56-4e4e-9335-7f978fb478c6","Type":"ContainerStarted","Data":"e742ff4b5e6653eff56df12c0fd6c3ca47fb3b828c30bf0896aa9039e791f24e"}
Dec 05 15:16:42 crc kubenswrapper[4840]: I1205 15:16:42.006369 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-aff3-account-create-update-lh6jh" event={"ID":"e4390a73-4bb5-42f6-a4dc-516f9657db60","Type":"ContainerStarted","Data":"a276c9c6ae36bf4ea30ff05e9af35c521da6368de8e9ff25ca285772363bb4b5"}
Dec 05 15:16:42 crc kubenswrapper[4840]: I1205 15:16:42.006407 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-aff3-account-create-update-lh6jh" event={"ID":"e4390a73-4bb5-42f6-a4dc-516f9657db60","Type":"ContainerStarted","Data":"221630d1f2d19e98981cee7732ed47b171981f4b034bf36de57b7d4313c94eb2"}
Dec 05 15:16:42 crc kubenswrapper[4840]: I1205 15:16:42.038472 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-251a-account-create-update-mcv4t" podStartSLOduration=1.038453762 podStartE2EDuration="1.038453762s" podCreationTimestamp="2025-12-05 15:16:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:16:42.023841768 +0000 UTC m=+1080.364904392" watchObservedRunningTime="2025-12-05 15:16:42.038453762 +0000 UTC m=+1080.379516376"
Dec 05 15:16:42 crc kubenswrapper[4840]: I1205 15:16:42.042748 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-5x7zs" podStartSLOduration=2.042738323 podStartE2EDuration="2.042738323s" podCreationTimestamp="2025-12-05 15:16:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:16:42.035156228 +0000 UTC m=+1080.376218842" watchObservedRunningTime="2025-12-05 15:16:42.042738323 +0000 UTC m=+1080.383800937"
Dec 05 15:16:42 crc kubenswrapper[4840]: I1205 15:16:42.054181 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-aff3-account-create-update-lh6jh" podStartSLOduration=2.054167007 podStartE2EDuration="2.054167007s" podCreationTimestamp="2025-12-05 15:16:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:16:42.051672896 +0000 UTC m=+1080.392735530" watchObservedRunningTime="2025-12-05 15:16:42.054167007 +0000 UTC m=+1080.395229611"
Dec 05 15:16:42 crc kubenswrapper[4840]: I1205 15:16:42.071985 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-r67qk" podStartSLOduration=2.071964781 podStartE2EDuration="2.071964781s" podCreationTimestamp="2025-12-05 15:16:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:16:42.065677263 +0000 UTC m=+1080.406739867" watchObservedRunningTime="2025-12-05 15:16:42.071964781 +0000 UTC m=+1080.413027395"
Dec 05 15:16:42 crc kubenswrapper[4840]: I1205 15:16:42.949174 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8fbc5445-ccff5"
Dec 05 15:16:43 crc kubenswrapper[4840]: I1205 15:16:43.017674 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-v7fw8"]
Dec 05 15:16:43 crc kubenswrapper[4840]: I1205 15:16:43.017934 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-v7fw8" podUID="d4640d84-ab2c-41ae-b84b-461d096d1e28" containerName="dnsmasq-dns" containerID="cri-o://be6a2ccf08ba18c7ebb70ab7921803bd581de2ad924024c6152d7aa71ba1c490" gracePeriod=10
Dec 05 15:16:43 crc kubenswrapper[4840]: I1205 15:16:43.023480 4840 generic.go:334] "Generic (PLEG): container finished" podID="eb1bdb27-1669-4d51-b689-4a48bd5f1567" containerID="f53a3fd1a8a3f346988b00fc0abb1c8bcf0851620c8f5dd94cb0067bc2baf7b7" exitCode=0
Dec 05 15:16:43 crc kubenswrapper[4840]: I1205 15:16:43.023578 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-5x7zs" event={"ID":"eb1bdb27-1669-4d51-b689-4a48bd5f1567","Type":"ContainerDied","Data":"f53a3fd1a8a3f346988b00fc0abb1c8bcf0851620c8f5dd94cb0067bc2baf7b7"}
Dec 05 15:16:43 crc kubenswrapper[4840]: I1205 15:16:43.026876 4840 generic.go:334] "Generic (PLEG): container finished" podID="41783d81-4e56-4e4e-9335-7f978fb478c6" containerID="b9ccaa47bbdee3a059eb000267ae680b212947bfda89c55bbeb303e6b8627db0" exitCode=0
Dec 05 15:16:43 crc kubenswrapper[4840]: I1205 15:16:43.026980 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-r67qk" event={"ID":"41783d81-4e56-4e4e-9335-7f978fb478c6","Type":"ContainerDied","Data":"b9ccaa47bbdee3a059eb000267ae680b212947bfda89c55bbeb303e6b8627db0"}
Dec 05 15:16:43 crc kubenswrapper[4840]: I1205 15:16:43.036019 4840 generic.go:334] "Generic (PLEG): container finished" podID="e4390a73-4bb5-42f6-a4dc-516f9657db60" containerID="a276c9c6ae36bf4ea30ff05e9af35c521da6368de8e9ff25ca285772363bb4b5" exitCode=0
Dec 05 15:16:43 crc kubenswrapper[4840]: I1205 15:16:43.036134 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-aff3-account-create-update-lh6jh" event={"ID":"e4390a73-4bb5-42f6-a4dc-516f9657db60","Type":"ContainerDied","Data":"a276c9c6ae36bf4ea30ff05e9af35c521da6368de8e9ff25ca285772363bb4b5"}
Dec 05 15:16:43 crc kubenswrapper[4840]: I1205 15:16:43.055054 4840 generic.go:334] "Generic (PLEG): container finished" podID="c2f3441f-bb15-467f-b962-5c8b66b27cfe" containerID="f2f41802d6f41dd2f191c6987238c7da7df6a159bf374fa300d59f7d07121acd" exitCode=0
Dec 05 15:16:43 crc kubenswrapper[4840]: I1205 15:16:43.055096 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-251a-account-create-update-mcv4t" event={"ID":"c2f3441f-bb15-467f-b962-5c8b66b27cfe","Type":"ContainerDied","Data":"f2f41802d6f41dd2f191c6987238c7da7df6a159bf374fa300d59f7d07121acd"}
Dec 05 15:16:43 crc kubenswrapper[4840]: I1205 15:16:43.577915 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-v7fw8"
Dec 05 15:16:43 crc kubenswrapper[4840]: I1205 15:16:43.687427 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4640d84-ab2c-41ae-b84b-461d096d1e28-config\") pod \"d4640d84-ab2c-41ae-b84b-461d096d1e28\" (UID: \"d4640d84-ab2c-41ae-b84b-461d096d1e28\") "
Dec 05 15:16:43 crc kubenswrapper[4840]: I1205 15:16:43.687551 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4640d84-ab2c-41ae-b84b-461d096d1e28-dns-svc\") pod \"d4640d84-ab2c-41ae-b84b-461d096d1e28\" (UID: \"d4640d84-ab2c-41ae-b84b-461d096d1e28\") "
Dec 05 15:16:43 crc kubenswrapper[4840]: I1205 15:16:43.687670 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xqtdq\" (UniqueName: \"kubernetes.io/projected/d4640d84-ab2c-41ae-b84b-461d096d1e28-kube-api-access-xqtdq\") pod \"d4640d84-ab2c-41ae-b84b-461d096d1e28\" (UID: \"d4640d84-ab2c-41ae-b84b-461d096d1e28\") "
Dec 05 15:16:43 crc kubenswrapper[4840]: I1205 15:16:43.695118 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4640d84-ab2c-41ae-b84b-461d096d1e28-kube-api-access-xqtdq" (OuterVolumeSpecName: "kube-api-access-xqtdq") pod "d4640d84-ab2c-41ae-b84b-461d096d1e28" (UID: "d4640d84-ab2c-41ae-b84b-461d096d1e28"). InnerVolumeSpecName "kube-api-access-xqtdq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 15:16:43 crc kubenswrapper[4840]: I1205 15:16:43.731475 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4640d84-ab2c-41ae-b84b-461d096d1e28-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d4640d84-ab2c-41ae-b84b-461d096d1e28" (UID: "d4640d84-ab2c-41ae-b84b-461d096d1e28"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 15:16:43 crc kubenswrapper[4840]: I1205 15:16:43.741358 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4640d84-ab2c-41ae-b84b-461d096d1e28-config" (OuterVolumeSpecName: "config") pod "d4640d84-ab2c-41ae-b84b-461d096d1e28" (UID: "d4640d84-ab2c-41ae-b84b-461d096d1e28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 15:16:43 crc kubenswrapper[4840]: I1205 15:16:43.789330 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xqtdq\" (UniqueName: \"kubernetes.io/projected/d4640d84-ab2c-41ae-b84b-461d096d1e28-kube-api-access-xqtdq\") on node \"crc\" DevicePath \"\""
Dec 05 15:16:43 crc kubenswrapper[4840]: I1205 15:16:43.789362 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4640d84-ab2c-41ae-b84b-461d096d1e28-config\") on node \"crc\" DevicePath \"\""
Dec 05 15:16:43 crc kubenswrapper[4840]: I1205 15:16:43.789373 4840 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4640d84-ab2c-41ae-b84b-461d096d1e28-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.063609 4840 generic.go:334] "Generic (PLEG): container finished" podID="d4640d84-ab2c-41ae-b84b-461d096d1e28" containerID="be6a2ccf08ba18c7ebb70ab7921803bd581de2ad924024c6152d7aa71ba1c490" exitCode=0
Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.063689 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-v7fw8"
Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.063689 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-v7fw8" event={"ID":"d4640d84-ab2c-41ae-b84b-461d096d1e28","Type":"ContainerDied","Data":"be6a2ccf08ba18c7ebb70ab7921803bd581de2ad924024c6152d7aa71ba1c490"}
Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.063805 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-v7fw8" event={"ID":"d4640d84-ab2c-41ae-b84b-461d096d1e28","Type":"ContainerDied","Data":"a0408ed164080bd88ae22dc1e05a4c1922c38b17ab64e831eab6b25a9bd9dcdb"}
Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.063829 4840 scope.go:117] "RemoveContainer" containerID="be6a2ccf08ba18c7ebb70ab7921803bd581de2ad924024c6152d7aa71ba1c490"
Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.065204 4840 generic.go:334] "Generic (PLEG): container finished" podID="45ba7f21-a1e8-4443-816f-91c5392f62df" containerID="93e8fc0e81aecd99c6a195df3c2e37e976d2b9b8207854165bc3de52f5e219d4" exitCode=0
Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.065246 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-b696t" event={"ID":"45ba7f21-a1e8-4443-816f-91c5392f62df","Type":"ContainerDied","Data":"93e8fc0e81aecd99c6a195df3c2e37e976d2b9b8207854165bc3de52f5e219d4"}
Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.091709 4840 scope.go:117] "RemoveContainer" containerID="f90976bc793925c111ba3387e479687cc551c6e2dbfbb16ea67bbbda03235f1a"
Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.132946 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-v7fw8"]
Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.139183 4840 scope.go:117] "RemoveContainer" containerID="be6a2ccf08ba18c7ebb70ab7921803bd581de2ad924024c6152d7aa71ba1c490"
Dec 05 15:16:44 crc kubenswrapper[4840]: E1205 15:16:44.140014 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be6a2ccf08ba18c7ebb70ab7921803bd581de2ad924024c6152d7aa71ba1c490\": container with ID starting with be6a2ccf08ba18c7ebb70ab7921803bd581de2ad924024c6152d7aa71ba1c490 not found: ID
does not exist" containerID="be6a2ccf08ba18c7ebb70ab7921803bd581de2ad924024c6152d7aa71ba1c490" Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.140058 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be6a2ccf08ba18c7ebb70ab7921803bd581de2ad924024c6152d7aa71ba1c490"} err="failed to get container status \"be6a2ccf08ba18c7ebb70ab7921803bd581de2ad924024c6152d7aa71ba1c490\": rpc error: code = NotFound desc = could not find container \"be6a2ccf08ba18c7ebb70ab7921803bd581de2ad924024c6152d7aa71ba1c490\": container with ID starting with be6a2ccf08ba18c7ebb70ab7921803bd581de2ad924024c6152d7aa71ba1c490 not found: ID does not exist" Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.140086 4840 scope.go:117] "RemoveContainer" containerID="f90976bc793925c111ba3387e479687cc551c6e2dbfbb16ea67bbbda03235f1a" Dec 05 15:16:44 crc kubenswrapper[4840]: E1205 15:16:44.140329 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f90976bc793925c111ba3387e479687cc551c6e2dbfbb16ea67bbbda03235f1a\": container with ID starting with f90976bc793925c111ba3387e479687cc551c6e2dbfbb16ea67bbbda03235f1a not found: ID does not exist" containerID="f90976bc793925c111ba3387e479687cc551c6e2dbfbb16ea67bbbda03235f1a" Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.140360 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f90976bc793925c111ba3387e479687cc551c6e2dbfbb16ea67bbbda03235f1a"} err="failed to get container status \"f90976bc793925c111ba3387e479687cc551c6e2dbfbb16ea67bbbda03235f1a\": rpc error: code = NotFound desc = could not find container \"f90976bc793925c111ba3387e479687cc551c6e2dbfbb16ea67bbbda03235f1a\": container with ID starting with f90976bc793925c111ba3387e479687cc551c6e2dbfbb16ea67bbbda03235f1a not found: ID does not exist" Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.149055 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-v7fw8"] Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.505605 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-251a-account-create-update-mcv4t" Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.600259 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2f3441f-bb15-467f-b962-5c8b66b27cfe-operator-scripts\") pod \"c2f3441f-bb15-467f-b962-5c8b66b27cfe\" (UID: \"c2f3441f-bb15-467f-b962-5c8b66b27cfe\") " Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.600397 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4qf7w\" (UniqueName: \"kubernetes.io/projected/c2f3441f-bb15-467f-b962-5c8b66b27cfe-kube-api-access-4qf7w\") pod \"c2f3441f-bb15-467f-b962-5c8b66b27cfe\" (UID: \"c2f3441f-bb15-467f-b962-5c8b66b27cfe\") " Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.601279 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2f3441f-bb15-467f-b962-5c8b66b27cfe-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c2f3441f-bb15-467f-b962-5c8b66b27cfe" (UID: "c2f3441f-bb15-467f-b962-5c8b66b27cfe"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.604366 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2f3441f-bb15-467f-b962-5c8b66b27cfe-kube-api-access-4qf7w" (OuterVolumeSpecName: "kube-api-access-4qf7w") pod "c2f3441f-bb15-467f-b962-5c8b66b27cfe" (UID: "c2f3441f-bb15-467f-b962-5c8b66b27cfe"). InnerVolumeSpecName "kube-api-access-4qf7w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.623096 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-5x7zs" Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.629298 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-aff3-account-create-update-lh6jh" Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.699651 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-r67qk" Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.742711 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb1bdb27-1669-4d51-b689-4a48bd5f1567-operator-scripts\") pod \"eb1bdb27-1669-4d51-b689-4a48bd5f1567\" (UID: \"eb1bdb27-1669-4d51-b689-4a48bd5f1567\") " Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.742795 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mq7j6\" (UniqueName: \"kubernetes.io/projected/eb1bdb27-1669-4d51-b689-4a48bd5f1567-kube-api-access-mq7j6\") pod \"eb1bdb27-1669-4d51-b689-4a48bd5f1567\" (UID: \"eb1bdb27-1669-4d51-b689-4a48bd5f1567\") " Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.742829 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4390a73-4bb5-42f6-a4dc-516f9657db60-operator-scripts\") pod \"e4390a73-4bb5-42f6-a4dc-516f9657db60\" (UID: \"e4390a73-4bb5-42f6-a4dc-516f9657db60\") " Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.742893 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-762q5\" (UniqueName: \"kubernetes.io/projected/e4390a73-4bb5-42f6-a4dc-516f9657db60-kube-api-access-762q5\") pod \"e4390a73-4bb5-42f6-a4dc-516f9657db60\" (UID: \"e4390a73-4bb5-42f6-a4dc-516f9657db60\") " Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.743260 4840 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2f3441f-bb15-467f-b962-5c8b66b27cfe-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.743289 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4qf7w\" (UniqueName: \"kubernetes.io/projected/c2f3441f-bb15-467f-b962-5c8b66b27cfe-kube-api-access-4qf7w\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.745543 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb1bdb27-1669-4d51-b689-4a48bd5f1567-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "eb1bdb27-1669-4d51-b689-4a48bd5f1567" (UID: "eb1bdb27-1669-4d51-b689-4a48bd5f1567"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.746080 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4390a73-4bb5-42f6-a4dc-516f9657db60-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e4390a73-4bb5-42f6-a4dc-516f9657db60" (UID: "e4390a73-4bb5-42f6-a4dc-516f9657db60"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.748435 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4390a73-4bb5-42f6-a4dc-516f9657db60-kube-api-access-762q5" (OuterVolumeSpecName: "kube-api-access-762q5") pod "e4390a73-4bb5-42f6-a4dc-516f9657db60" (UID: "e4390a73-4bb5-42f6-a4dc-516f9657db60"). InnerVolumeSpecName "kube-api-access-762q5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.755248 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb1bdb27-1669-4d51-b689-4a48bd5f1567-kube-api-access-mq7j6" (OuterVolumeSpecName: "kube-api-access-mq7j6") pod "eb1bdb27-1669-4d51-b689-4a48bd5f1567" (UID: "eb1bdb27-1669-4d51-b689-4a48bd5f1567"). InnerVolumeSpecName "kube-api-access-mq7j6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.844454 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tlpgg\" (UniqueName: \"kubernetes.io/projected/41783d81-4e56-4e4e-9335-7f978fb478c6-kube-api-access-tlpgg\") pod \"41783d81-4e56-4e4e-9335-7f978fb478c6\" (UID: \"41783d81-4e56-4e4e-9335-7f978fb478c6\") " Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.844653 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/41783d81-4e56-4e4e-9335-7f978fb478c6-operator-scripts\") pod \"41783d81-4e56-4e4e-9335-7f978fb478c6\" (UID: \"41783d81-4e56-4e4e-9335-7f978fb478c6\") " Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.845698 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41783d81-4e56-4e4e-9335-7f978fb478c6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "41783d81-4e56-4e4e-9335-7f978fb478c6" (UID: "41783d81-4e56-4e4e-9335-7f978fb478c6"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.846598 4840 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/41783d81-4e56-4e4e-9335-7f978fb478c6-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.846628 4840 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb1bdb27-1669-4d51-b689-4a48bd5f1567-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.846649 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mq7j6\" (UniqueName: \"kubernetes.io/projected/eb1bdb27-1669-4d51-b689-4a48bd5f1567-kube-api-access-mq7j6\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.846663 4840 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e4390a73-4bb5-42f6-a4dc-516f9657db60-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.846672 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-762q5\" (UniqueName: \"kubernetes.io/projected/e4390a73-4bb5-42f6-a4dc-516f9657db60-kube-api-access-762q5\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.848826 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41783d81-4e56-4e4e-9335-7f978fb478c6-kube-api-access-tlpgg" (OuterVolumeSpecName: "kube-api-access-tlpgg") pod "41783d81-4e56-4e4e-9335-7f978fb478c6" (UID: "41783d81-4e56-4e4e-9335-7f978fb478c6"). InnerVolumeSpecName "kube-api-access-tlpgg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:16:44 crc kubenswrapper[4840]: I1205 15:16:44.948278 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tlpgg\" (UniqueName: \"kubernetes.io/projected/41783d81-4e56-4e4e-9335-7f978fb478c6-kube-api-access-tlpgg\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:45 crc kubenswrapper[4840]: I1205 15:16:45.087350 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-251a-account-create-update-mcv4t" event={"ID":"c2f3441f-bb15-467f-b962-5c8b66b27cfe","Type":"ContainerDied","Data":"07ffc270e722ee10a938d8c069cedbea6ae84e403a5972d5bf0befc844bfcf2d"} Dec 05 15:16:45 crc kubenswrapper[4840]: I1205 15:16:45.087680 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="07ffc270e722ee10a938d8c069cedbea6ae84e403a5972d5bf0befc844bfcf2d" Dec 05 15:16:45 crc kubenswrapper[4840]: I1205 15:16:45.087732 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-251a-account-create-update-mcv4t" Dec 05 15:16:45 crc kubenswrapper[4840]: I1205 15:16:45.090026 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-5x7zs" event={"ID":"eb1bdb27-1669-4d51-b689-4a48bd5f1567","Type":"ContainerDied","Data":"f8e581a671f5cfef42a32e400c49cce7adec9e1e2c037be6908588d773ec704d"} Dec 05 15:16:45 crc kubenswrapper[4840]: I1205 15:16:45.090063 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f8e581a671f5cfef42a32e400c49cce7adec9e1e2c037be6908588d773ec704d" Dec 05 15:16:45 crc kubenswrapper[4840]: I1205 15:16:45.090150 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-5x7zs" Dec 05 15:16:45 crc kubenswrapper[4840]: I1205 15:16:45.096179 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-r67qk" Dec 05 15:16:45 crc kubenswrapper[4840]: I1205 15:16:45.096184 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-r67qk" event={"ID":"41783d81-4e56-4e4e-9335-7f978fb478c6","Type":"ContainerDied","Data":"e742ff4b5e6653eff56df12c0fd6c3ca47fb3b828c30bf0896aa9039e791f24e"} Dec 05 15:16:45 crc kubenswrapper[4840]: I1205 15:16:45.096996 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e742ff4b5e6653eff56df12c0fd6c3ca47fb3b828c30bf0896aa9039e791f24e" Dec 05 15:16:45 crc kubenswrapper[4840]: I1205 15:16:45.100100 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-aff3-account-create-update-lh6jh" Dec 05 15:16:45 crc kubenswrapper[4840]: I1205 15:16:45.100101 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-aff3-account-create-update-lh6jh" event={"ID":"e4390a73-4bb5-42f6-a4dc-516f9657db60","Type":"ContainerDied","Data":"221630d1f2d19e98981cee7732ed47b171981f4b034bf36de57b7d4313c94eb2"} Dec 05 15:16:45 crc kubenswrapper[4840]: I1205 15:16:45.103283 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="221630d1f2d19e98981cee7732ed47b171981f4b034bf36de57b7d4313c94eb2" Dec 05 15:16:45 crc kubenswrapper[4840]: I1205 15:16:45.811438 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:45 crc kubenswrapper[4840]: I1205 15:16:45.982815 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bd5dm\" (UniqueName: \"kubernetes.io/projected/45ba7f21-a1e8-4443-816f-91c5392f62df-kube-api-access-bd5dm\") pod \"45ba7f21-a1e8-4443-816f-91c5392f62df\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " Dec 05 15:16:45 crc kubenswrapper[4840]: I1205 15:16:45.982875 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/45ba7f21-a1e8-4443-816f-91c5392f62df-etc-swift\") pod \"45ba7f21-a1e8-4443-816f-91c5392f62df\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " Dec 05 15:16:45 crc kubenswrapper[4840]: I1205 15:16:45.982915 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/45ba7f21-a1e8-4443-816f-91c5392f62df-dispersionconf\") pod \"45ba7f21-a1e8-4443-816f-91c5392f62df\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " Dec 05 15:16:45 crc kubenswrapper[4840]: I1205 15:16:45.982983 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/45ba7f21-a1e8-4443-816f-91c5392f62df-ring-data-devices\") pod \"45ba7f21-a1e8-4443-816f-91c5392f62df\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " Dec 05 15:16:45 crc kubenswrapper[4840]: I1205 15:16:45.983070 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/45ba7f21-a1e8-4443-816f-91c5392f62df-swiftconf\") pod \"45ba7f21-a1e8-4443-816f-91c5392f62df\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " Dec 05 15:16:45 crc kubenswrapper[4840]: I1205 15:16:45.983142 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45ba7f21-a1e8-4443-816f-91c5392f62df-combined-ca-bundle\") pod \"45ba7f21-a1e8-4443-816f-91c5392f62df\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " Dec 05 15:16:45 crc kubenswrapper[4840]: I1205 15:16:45.983167 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/45ba7f21-a1e8-4443-816f-91c5392f62df-scripts\") pod \"45ba7f21-a1e8-4443-816f-91c5392f62df\" (UID: \"45ba7f21-a1e8-4443-816f-91c5392f62df\") " Dec 05 15:16:45 crc kubenswrapper[4840]: I1205 15:16:45.984140 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45ba7f21-a1e8-4443-816f-91c5392f62df-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "45ba7f21-a1e8-4443-816f-91c5392f62df" (UID: "45ba7f21-a1e8-4443-816f-91c5392f62df"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:16:45 crc kubenswrapper[4840]: I1205 15:16:45.984491 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45ba7f21-a1e8-4443-816f-91c5392f62df-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "45ba7f21-a1e8-4443-816f-91c5392f62df" (UID: "45ba7f21-a1e8-4443-816f-91c5392f62df"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:16:45 crc kubenswrapper[4840]: I1205 15:16:45.989723 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45ba7f21-a1e8-4443-816f-91c5392f62df-kube-api-access-bd5dm" (OuterVolumeSpecName: "kube-api-access-bd5dm") pod "45ba7f21-a1e8-4443-816f-91c5392f62df" (UID: "45ba7f21-a1e8-4443-816f-91c5392f62df"). InnerVolumeSpecName "kube-api-access-bd5dm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.008786 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45ba7f21-a1e8-4443-816f-91c5392f62df-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "45ba7f21-a1e8-4443-816f-91c5392f62df" (UID: "45ba7f21-a1e8-4443-816f-91c5392f62df"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.010889 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45ba7f21-a1e8-4443-816f-91c5392f62df-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "45ba7f21-a1e8-4443-816f-91c5392f62df" (UID: "45ba7f21-a1e8-4443-816f-91c5392f62df"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.010753 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45ba7f21-a1e8-4443-816f-91c5392f62df-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "45ba7f21-a1e8-4443-816f-91c5392f62df" (UID: "45ba7f21-a1e8-4443-816f-91c5392f62df"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.020335 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45ba7f21-a1e8-4443-816f-91c5392f62df-scripts" (OuterVolumeSpecName: "scripts") pod "45ba7f21-a1e8-4443-816f-91c5392f62df" (UID: "45ba7f21-a1e8-4443-816f-91c5392f62df"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.084573 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4640d84-ab2c-41ae-b84b-461d096d1e28" path="/var/lib/kubelet/pods/d4640d84-ab2c-41ae-b84b-461d096d1e28/volumes" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.084941 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45ba7f21-a1e8-4443-816f-91c5392f62df-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.085075 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/45ba7f21-a1e8-4443-816f-91c5392f62df-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.085086 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bd5dm\" (UniqueName: \"kubernetes.io/projected/45ba7f21-a1e8-4443-816f-91c5392f62df-kube-api-access-bd5dm\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.085096 4840 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/45ba7f21-a1e8-4443-816f-91c5392f62df-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.085106 4840 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/45ba7f21-a1e8-4443-816f-91c5392f62df-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.085115 4840 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/45ba7f21-a1e8-4443-816f-91c5392f62df-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.085123 4840 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/45ba7f21-a1e8-4443-816f-91c5392f62df-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.114098 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-b696t" event={"ID":"45ba7f21-a1e8-4443-816f-91c5392f62df","Type":"ContainerDied","Data":"59dc59e94400bd39cb14459fb6fc4585bf66db4c84f9107b0a73d8f3317115bb"} Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.114602 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="59dc59e94400bd39cb14459fb6fc4585bf66db4c84f9107b0a73d8f3317115bb" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.114735 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-b696t" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.363223 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-bmclv"] Dec 05 15:16:46 crc kubenswrapper[4840]: E1205 15:16:46.364242 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45ba7f21-a1e8-4443-816f-91c5392f62df" containerName="swift-ring-rebalance" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.364424 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="45ba7f21-a1e8-4443-816f-91c5392f62df" containerName="swift-ring-rebalance" Dec 05 15:16:46 crc kubenswrapper[4840]: E1205 15:16:46.364555 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb1bdb27-1669-4d51-b689-4a48bd5f1567" containerName="mariadb-database-create" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.364678 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb1bdb27-1669-4d51-b689-4a48bd5f1567" containerName="mariadb-database-create" Dec 05 15:16:46 crc kubenswrapper[4840]: E1205 15:16:46.364845 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4640d84-ab2c-41ae-b84b-461d096d1e28" containerName="init" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.365061 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4640d84-ab2c-41ae-b84b-461d096d1e28" containerName="init" Dec 05 15:16:46 crc kubenswrapper[4840]: E1205 15:16:46.365205 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4640d84-ab2c-41ae-b84b-461d096d1e28" containerName="dnsmasq-dns" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.365351 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4640d84-ab2c-41ae-b84b-461d096d1e28" containerName="dnsmasq-dns" Dec 05 15:16:46 crc kubenswrapper[4840]: E1205 15:16:46.365524 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41783d81-4e56-4e4e-9335-7f978fb478c6" containerName="mariadb-database-create" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.365646 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="41783d81-4e56-4e4e-9335-7f978fb478c6" containerName="mariadb-database-create" Dec 05 15:16:46 crc kubenswrapper[4840]: E1205 15:16:46.365820 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4390a73-4bb5-42f6-a4dc-516f9657db60" containerName="mariadb-account-create-update" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.366061 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4390a73-4bb5-42f6-a4dc-516f9657db60" containerName="mariadb-account-create-update" Dec 05 15:16:46 crc kubenswrapper[4840]: E1205 15:16:46.366216 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2f3441f-bb15-467f-b962-5c8b66b27cfe" containerName="mariadb-account-create-update" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.366359 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2f3441f-bb15-467f-b962-5c8b66b27cfe" containerName="mariadb-account-create-update" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.366834 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4640d84-ab2c-41ae-b84b-461d096d1e28" containerName="dnsmasq-dns" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.367027 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="45ba7f21-a1e8-4443-816f-91c5392f62df" containerName="swift-ring-rebalance" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.367167 4840 
memory_manager.go:354] "RemoveStaleState removing state" podUID="c2f3441f-bb15-467f-b962-5c8b66b27cfe" containerName="mariadb-account-create-update" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.367316 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb1bdb27-1669-4d51-b689-4a48bd5f1567" containerName="mariadb-database-create" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.367493 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4390a73-4bb5-42f6-a4dc-516f9657db60" containerName="mariadb-account-create-update" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.367623 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="41783d81-4e56-4e4e-9335-7f978fb478c6" containerName="mariadb-database-create" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.368755 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-bmclv" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.376537 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-bmclv"] Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.640307 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qm2k9\" (UniqueName: \"kubernetes.io/projected/40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0-kube-api-access-qm2k9\") pod \"glance-db-create-bmclv\" (UID: \"40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0\") " pod="openstack/glance-db-create-bmclv" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.647513 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-cttvn" podUID="ab69b71e-d666-46a7-a896-96a70fff685a" containerName="ovn-controller" probeResult="failure" output=< Dec 05 15:16:46 crc kubenswrapper[4840]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 05 15:16:46 crc kubenswrapper[4840]: > Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.651904 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0-operator-scripts\") pod \"glance-db-create-bmclv\" (UID: \"40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0\") " pod="openstack/glance-db-create-bmclv" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.659630 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-67a5-account-create-update-m5q8n"] Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.661483 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-67a5-account-create-update-m5q8n" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.668549 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.674168 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-67a5-account-create-update-m5q8n"] Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.753628 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qm2k9\" (UniqueName: \"kubernetes.io/projected/40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0-kube-api-access-qm2k9\") pod \"glance-db-create-bmclv\" (UID: \"40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0\") " pod="openstack/glance-db-create-bmclv" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.753722 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0-operator-scripts\") pod \"glance-db-create-bmclv\" (UID: \"40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0\") " pod="openstack/glance-db-create-bmclv" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.753812 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81caf91c-1e3f-48ee-b953-85b6cb26c922-operator-scripts\") pod \"glance-67a5-account-create-update-m5q8n\" (UID: \"81caf91c-1e3f-48ee-b953-85b6cb26c922\") " pod="openstack/glance-67a5-account-create-update-m5q8n" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.753843 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rz7jf\" (UniqueName: \"kubernetes.io/projected/81caf91c-1e3f-48ee-b953-85b6cb26c922-kube-api-access-rz7jf\") pod \"glance-67a5-account-create-update-m5q8n\" (UID: \"81caf91c-1e3f-48ee-b953-85b6cb26c922\") " pod="openstack/glance-67a5-account-create-update-m5q8n" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.754777 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0-operator-scripts\") pod \"glance-db-create-bmclv\" (UID: \"40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0\") " pod="openstack/glance-db-create-bmclv" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.772774 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qm2k9\" (UniqueName: \"kubernetes.io/projected/40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0-kube-api-access-qm2k9\") pod \"glance-db-create-bmclv\" (UID: \"40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0\") " pod="openstack/glance-db-create-bmclv" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.855831 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81caf91c-1e3f-48ee-b953-85b6cb26c922-operator-scripts\") pod \"glance-67a5-account-create-update-m5q8n\" (UID: \"81caf91c-1e3f-48ee-b953-85b6cb26c922\") " pod="openstack/glance-67a5-account-create-update-m5q8n" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.856196 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rz7jf\" (UniqueName: \"kubernetes.io/projected/81caf91c-1e3f-48ee-b953-85b6cb26c922-kube-api-access-rz7jf\") pod 
\"glance-67a5-account-create-update-m5q8n\" (UID: \"81caf91c-1e3f-48ee-b953-85b6cb26c922\") " pod="openstack/glance-67a5-account-create-update-m5q8n" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.863464 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81caf91c-1e3f-48ee-b953-85b6cb26c922-operator-scripts\") pod \"glance-67a5-account-create-update-m5q8n\" (UID: \"81caf91c-1e3f-48ee-b953-85b6cb26c922\") " pod="openstack/glance-67a5-account-create-update-m5q8n" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.875450 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rz7jf\" (UniqueName: \"kubernetes.io/projected/81caf91c-1e3f-48ee-b953-85b6cb26c922-kube-api-access-rz7jf\") pod \"glance-67a5-account-create-update-m5q8n\" (UID: \"81caf91c-1e3f-48ee-b953-85b6cb26c922\") " pod="openstack/glance-67a5-account-create-update-m5q8n" Dec 05 15:16:46 crc kubenswrapper[4840]: I1205 15:16:46.990127 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-67a5-account-create-update-m5q8n" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.000958 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-bmclv" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.127735 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"7548669d-ea2b-4442-b4b6-f3408d636798","Type":"ContainerStarted","Data":"d2f637797ee752fdaec7b81d6a93955f31fc4bf657467982c12baea0838e7386"} Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.134060 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"0fdc90f7-3261-4c8f-860b-c5f3890d3470","Type":"ContainerStarted","Data":"0e225f33c3bb4b75431332a2eac089ea90ac1401b70b36aa239069b016122895"} Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.169097 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=48.036751109 podStartE2EDuration="59.169043638s" podCreationTimestamp="2025-12-05 15:15:48 +0000 UTC" firstStartedPulling="2025-12-05 15:15:58.159512651 +0000 UTC m=+1036.500575265" lastFinishedPulling="2025-12-05 15:16:09.29180518 +0000 UTC m=+1047.632867794" observedRunningTime="2025-12-05 15:16:47.151543532 +0000 UTC m=+1085.492606146" watchObservedRunningTime="2025-12-05 15:16:47.169043638 +0000 UTC m=+1085.510106252" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.193692 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=52.524954928 podStartE2EDuration="1m1.193667095s" podCreationTimestamp="2025-12-05 15:15:46 +0000 UTC" firstStartedPulling="2025-12-05 15:16:00.627017165 +0000 UTC m=+1038.968079779" lastFinishedPulling="2025-12-05 15:16:09.295729332 +0000 UTC m=+1047.636791946" observedRunningTime="2025-12-05 15:16:47.177782555 +0000 UTC m=+1085.518845199" watchObservedRunningTime="2025-12-05 15:16:47.193667095 +0000 UTC m=+1085.534729729" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.356009 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-cttvn-config-ckwkr"] Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.357581 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-cttvn-config-ckwkr" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.362845 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.387253 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-cttvn-config-ckwkr"] Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.440304 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.441960 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.444140 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.445314 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-8dxvb" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.445490 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.445716 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.452284 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.467356 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-additional-scripts\") pod \"ovn-controller-cttvn-config-ckwkr\" (UID: \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\") " pod="openstack/ovn-controller-cttvn-config-ckwkr" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.467434 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdlhf\" (UniqueName: \"kubernetes.io/projected/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-kube-api-access-sdlhf\") pod \"ovn-controller-cttvn-config-ckwkr\" (UID: \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\") " pod="openstack/ovn-controller-cttvn-config-ckwkr" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.467470 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-var-log-ovn\") pod \"ovn-controller-cttvn-config-ckwkr\" (UID: \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\") " pod="openstack/ovn-controller-cttvn-config-ckwkr" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.467501 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-var-run-ovn\") pod \"ovn-controller-cttvn-config-ckwkr\" (UID: \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\") " pod="openstack/ovn-controller-cttvn-config-ckwkr" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.467550 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-scripts\") pod \"ovn-controller-cttvn-config-ckwkr\" (UID: 
\"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\") " pod="openstack/ovn-controller-cttvn-config-ckwkr" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.467582 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-var-run\") pod \"ovn-controller-cttvn-config-ckwkr\" (UID: \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\") " pod="openstack/ovn-controller-cttvn-config-ckwkr" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.485128 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-67a5-account-create-update-m5q8n"] Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.568558 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-additional-scripts\") pod \"ovn-controller-cttvn-config-ckwkr\" (UID: \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\") " pod="openstack/ovn-controller-cttvn-config-ckwkr" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.568602 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/1457a36a-acaa-42e9-b5ea-7667c272d25d-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"1457a36a-acaa-42e9-b5ea-7667c272d25d\") " pod="openstack/ovn-northd-0" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.568634 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdlhf\" (UniqueName: \"kubernetes.io/projected/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-kube-api-access-sdlhf\") pod \"ovn-controller-cttvn-config-ckwkr\" (UID: \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\") " pod="openstack/ovn-controller-cttvn-config-ckwkr" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.568697 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-var-log-ovn\") pod \"ovn-controller-cttvn-config-ckwkr\" (UID: \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\") " pod="openstack/ovn-controller-cttvn-config-ckwkr" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.568715 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1457a36a-acaa-42e9-b5ea-7667c272d25d-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"1457a36a-acaa-42e9-b5ea-7667c272d25d\") " pod="openstack/ovn-northd-0" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.568734 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-var-run-ovn\") pod \"ovn-controller-cttvn-config-ckwkr\" (UID: \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\") " pod="openstack/ovn-controller-cttvn-config-ckwkr" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.568749 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1457a36a-acaa-42e9-b5ea-7667c272d25d-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"1457a36a-acaa-42e9-b5ea-7667c272d25d\") " pod="openstack/ovn-northd-0" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.568788 4840 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-scripts\") pod \"ovn-controller-cttvn-config-ckwkr\" (UID: \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\") " pod="openstack/ovn-controller-cttvn-config-ckwkr" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.568807 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-var-run\") pod \"ovn-controller-cttvn-config-ckwkr\" (UID: \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\") " pod="openstack/ovn-controller-cttvn-config-ckwkr" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.568855 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1457a36a-acaa-42e9-b5ea-7667c272d25d-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"1457a36a-acaa-42e9-b5ea-7667c272d25d\") " pod="openstack/ovn-northd-0" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.568896 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcv5q\" (UniqueName: \"kubernetes.io/projected/1457a36a-acaa-42e9-b5ea-7667c272d25d-kube-api-access-dcv5q\") pod \"ovn-northd-0\" (UID: \"1457a36a-acaa-42e9-b5ea-7667c272d25d\") " pod="openstack/ovn-northd-0" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.568914 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1457a36a-acaa-42e9-b5ea-7667c272d25d-scripts\") pod \"ovn-northd-0\" (UID: \"1457a36a-acaa-42e9-b5ea-7667c272d25d\") " pod="openstack/ovn-northd-0" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.568938 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1457a36a-acaa-42e9-b5ea-7667c272d25d-config\") pod \"ovn-northd-0\" (UID: \"1457a36a-acaa-42e9-b5ea-7667c272d25d\") " pod="openstack/ovn-northd-0" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.569575 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-additional-scripts\") pod \"ovn-controller-cttvn-config-ckwkr\" (UID: \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\") " pod="openstack/ovn-controller-cttvn-config-ckwkr" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.570127 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-var-run-ovn\") pod \"ovn-controller-cttvn-config-ckwkr\" (UID: \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\") " pod="openstack/ovn-controller-cttvn-config-ckwkr" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.570127 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-var-run\") pod \"ovn-controller-cttvn-config-ckwkr\" (UID: \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\") " pod="openstack/ovn-controller-cttvn-config-ckwkr" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.570306 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-var-log-ovn\") pod 
\"ovn-controller-cttvn-config-ckwkr\" (UID: \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\") " pod="openstack/ovn-controller-cttvn-config-ckwkr" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.571923 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-scripts\") pod \"ovn-controller-cttvn-config-ckwkr\" (UID: \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\") " pod="openstack/ovn-controller-cttvn-config-ckwkr" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.591725 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdlhf\" (UniqueName: \"kubernetes.io/projected/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-kube-api-access-sdlhf\") pod \"ovn-controller-cttvn-config-ckwkr\" (UID: \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\") " pod="openstack/ovn-controller-cttvn-config-ckwkr" Dec 05 15:16:47 crc kubenswrapper[4840]: W1205 15:16:47.601598 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod40044a4d_5ed3_4bb3_83f7_43b0dd3b56c0.slice/crio-b795712cc8aada23536ff8e42415bcf1c8449ae1164a3c0e627dde5b69cc0694 WatchSource:0}: Error finding container b795712cc8aada23536ff8e42415bcf1c8449ae1164a3c0e627dde5b69cc0694: Status 404 returned error can't find the container with id b795712cc8aada23536ff8e42415bcf1c8449ae1164a3c0e627dde5b69cc0694 Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.603731 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-bmclv"] Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.670077 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1457a36a-acaa-42e9-b5ea-7667c272d25d-scripts\") pod \"ovn-northd-0\" (UID: \"1457a36a-acaa-42e9-b5ea-7667c272d25d\") " pod="openstack/ovn-northd-0" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.670129 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1457a36a-acaa-42e9-b5ea-7667c272d25d-config\") pod \"ovn-northd-0\" (UID: \"1457a36a-acaa-42e9-b5ea-7667c272d25d\") " pod="openstack/ovn-northd-0" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.670177 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/1457a36a-acaa-42e9-b5ea-7667c272d25d-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"1457a36a-acaa-42e9-b5ea-7667c272d25d\") " pod="openstack/ovn-northd-0" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.670221 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1457a36a-acaa-42e9-b5ea-7667c272d25d-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"1457a36a-acaa-42e9-b5ea-7667c272d25d\") " pod="openstack/ovn-northd-0" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.670241 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1457a36a-acaa-42e9-b5ea-7667c272d25d-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"1457a36a-acaa-42e9-b5ea-7667c272d25d\") " pod="openstack/ovn-northd-0" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.670324 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1457a36a-acaa-42e9-b5ea-7667c272d25d-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"1457a36a-acaa-42e9-b5ea-7667c272d25d\") " pod="openstack/ovn-northd-0" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.670348 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcv5q\" (UniqueName: \"kubernetes.io/projected/1457a36a-acaa-42e9-b5ea-7667c272d25d-kube-api-access-dcv5q\") pod \"ovn-northd-0\" (UID: \"1457a36a-acaa-42e9-b5ea-7667c272d25d\") " pod="openstack/ovn-northd-0" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.670889 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1457a36a-acaa-42e9-b5ea-7667c272d25d-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"1457a36a-acaa-42e9-b5ea-7667c272d25d\") " pod="openstack/ovn-northd-0" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.671337 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1457a36a-acaa-42e9-b5ea-7667c272d25d-config\") pod \"ovn-northd-0\" (UID: \"1457a36a-acaa-42e9-b5ea-7667c272d25d\") " pod="openstack/ovn-northd-0" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.671540 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1457a36a-acaa-42e9-b5ea-7667c272d25d-scripts\") pod \"ovn-northd-0\" (UID: \"1457a36a-acaa-42e9-b5ea-7667c272d25d\") " pod="openstack/ovn-northd-0" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.673988 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/1457a36a-acaa-42e9-b5ea-7667c272d25d-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"1457a36a-acaa-42e9-b5ea-7667c272d25d\") " pod="openstack/ovn-northd-0" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.674139 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1457a36a-acaa-42e9-b5ea-7667c272d25d-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"1457a36a-acaa-42e9-b5ea-7667c272d25d\") " pod="openstack/ovn-northd-0" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.674623 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1457a36a-acaa-42e9-b5ea-7667c272d25d-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"1457a36a-acaa-42e9-b5ea-7667c272d25d\") " pod="openstack/ovn-northd-0" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.686803 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-cttvn-config-ckwkr" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.696101 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcv5q\" (UniqueName: \"kubernetes.io/projected/1457a36a-acaa-42e9-b5ea-7667c272d25d-kube-api-access-dcv5q\") pod \"ovn-northd-0\" (UID: \"1457a36a-acaa-42e9-b5ea-7667c272d25d\") " pod="openstack/ovn-northd-0" Dec 05 15:16:47 crc kubenswrapper[4840]: I1205 15:16:47.773643 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 15:16:48 crc kubenswrapper[4840]: I1205 15:16:48.144957 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-67a5-account-create-update-m5q8n" event={"ID":"81caf91c-1e3f-48ee-b953-85b6cb26c922","Type":"ContainerStarted","Data":"60f57e86b1c64d919067f5f25caceeb5f9bd970ab618ae418f090145fabe5eab"} Dec 05 15:16:48 crc kubenswrapper[4840]: I1205 15:16:48.145011 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-67a5-account-create-update-m5q8n" event={"ID":"81caf91c-1e3f-48ee-b953-85b6cb26c922","Type":"ContainerStarted","Data":"e5bb073a69d95ecf66af017fb188535971910f10bc27ff079e3ae881073f07e2"} Dec 05 15:16:48 crc kubenswrapper[4840]: I1205 15:16:48.151972 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-bmclv" event={"ID":"40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0","Type":"ContainerStarted","Data":"525858ebcd8bbff8d0b9efa77f7fa96de4f7addb5b4bf543b8febb2a6d75b6e9"} Dec 05 15:16:48 crc kubenswrapper[4840]: I1205 15:16:48.152034 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-bmclv" event={"ID":"40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0","Type":"ContainerStarted","Data":"b795712cc8aada23536ff8e42415bcf1c8449ae1164a3c0e627dde5b69cc0694"} Dec 05 15:16:48 crc kubenswrapper[4840]: I1205 15:16:48.171462 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-67a5-account-create-update-m5q8n" podStartSLOduration=2.171438523 podStartE2EDuration="2.171438523s" podCreationTimestamp="2025-12-05 15:16:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:16:48.164798585 +0000 UTC m=+1086.505861199" watchObservedRunningTime="2025-12-05 15:16:48.171438523 +0000 UTC m=+1086.512501137" Dec 05 15:16:48 crc kubenswrapper[4840]: W1205 15:16:48.185158 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3b9e5ff8_9f77_491e_b1c7_9ae2c5c5c5f6.slice/crio-508169d6db78a40ebb18ad21514fe5c2a3c82d4966b8f596b6b917dfc2aada34 WatchSource:0}: Error finding container 508169d6db78a40ebb18ad21514fe5c2a3c82d4966b8f596b6b917dfc2aada34: Status 404 returned error can't find the container with id 508169d6db78a40ebb18ad21514fe5c2a3c82d4966b8f596b6b917dfc2aada34 Dec 05 15:16:48 crc kubenswrapper[4840]: I1205 15:16:48.186791 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-cttvn-config-ckwkr"] Dec 05 15:16:48 crc kubenswrapper[4840]: I1205 15:16:48.240881 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-bmclv" podStartSLOduration=2.240847129 podStartE2EDuration="2.240847129s" podCreationTimestamp="2025-12-05 15:16:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:16:48.181639002 +0000 UTC m=+1086.522701616" watchObservedRunningTime="2025-12-05 15:16:48.240847129 +0000 UTC m=+1086.581909743" Dec 05 15:16:48 crc kubenswrapper[4840]: I1205 15:16:48.279829 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 15:16:48 crc kubenswrapper[4840]: W1205 15:16:48.329958 4840 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1457a36a_acaa_42e9_b5ea_7667c272d25d.slice/crio-96ce1f286a2f9921e2f97cf5ca6fbc3d381ed3ef86d7b1b6b08cf9264077cb73 WatchSource:0}: Error finding container 96ce1f286a2f9921e2f97cf5ca6fbc3d381ed3ef86d7b1b6b08cf9264077cb73: Status 404 returned error can't find the container with id 96ce1f286a2f9921e2f97cf5ca6fbc3d381ed3ef86d7b1b6b08cf9264077cb73 Dec 05 15:16:49 crc kubenswrapper[4840]: I1205 15:16:49.171092 4840 generic.go:334] "Generic (PLEG): container finished" podID="3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6" containerID="be2de59156e98ca002e13a6716bcb93f683e2804f791db487ccae2bdf55580ba" exitCode=0 Dec 05 15:16:49 crc kubenswrapper[4840]: I1205 15:16:49.171151 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-cttvn-config-ckwkr" event={"ID":"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6","Type":"ContainerDied","Data":"be2de59156e98ca002e13a6716bcb93f683e2804f791db487ccae2bdf55580ba"} Dec 05 15:16:49 crc kubenswrapper[4840]: I1205 15:16:49.171177 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-cttvn-config-ckwkr" event={"ID":"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6","Type":"ContainerStarted","Data":"508169d6db78a40ebb18ad21514fe5c2a3c82d4966b8f596b6b917dfc2aada34"} Dec 05 15:16:49 crc kubenswrapper[4840]: I1205 15:16:49.174894 4840 generic.go:334] "Generic (PLEG): container finished" podID="40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0" containerID="525858ebcd8bbff8d0b9efa77f7fa96de4f7addb5b4bf543b8febb2a6d75b6e9" exitCode=0 Dec 05 15:16:49 crc kubenswrapper[4840]: I1205 15:16:49.175158 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-bmclv" event={"ID":"40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0","Type":"ContainerDied","Data":"525858ebcd8bbff8d0b9efa77f7fa96de4f7addb5b4bf543b8febb2a6d75b6e9"} Dec 05 15:16:49 crc kubenswrapper[4840]: I1205 15:16:49.177034 4840 generic.go:334] "Generic (PLEG): container finished" podID="81caf91c-1e3f-48ee-b953-85b6cb26c922" containerID="60f57e86b1c64d919067f5f25caceeb5f9bd970ab618ae418f090145fabe5eab" exitCode=0 Dec 05 15:16:49 crc kubenswrapper[4840]: I1205 15:16:49.177107 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-67a5-account-create-update-m5q8n" event={"ID":"81caf91c-1e3f-48ee-b953-85b6cb26c922","Type":"ContainerDied","Data":"60f57e86b1c64d919067f5f25caceeb5f9bd970ab618ae418f090145fabe5eab"} Dec 05 15:16:49 crc kubenswrapper[4840]: I1205 15:16:49.178331 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"1457a36a-acaa-42e9-b5ea-7667c272d25d","Type":"ContainerStarted","Data":"96ce1f286a2f9921e2f97cf5ca6fbc3d381ed3ef86d7b1b6b08cf9264077cb73"} Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.196587 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"1457a36a-acaa-42e9-b5ea-7667c272d25d","Type":"ContainerStarted","Data":"acddf3755af49100a8a8fb53a0e6b9ef7cfdfb97efe6edaecb036204bf92f5a3"} Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.197013 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"1457a36a-acaa-42e9-b5ea-7667c272d25d","Type":"ContainerStarted","Data":"6f3e4121f4706f4e5ac747dce124a2156a28eb00fe3fee842c86adf25eb5950b"} Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.197185 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 
15:16:50.234771 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.19932089 podStartE2EDuration="3.2347155s" podCreationTimestamp="2025-12-05 15:16:47 +0000 UTC" firstStartedPulling="2025-12-05 15:16:48.33270435 +0000 UTC m=+1086.673766964" lastFinishedPulling="2025-12-05 15:16:49.36809892 +0000 UTC m=+1087.709161574" observedRunningTime="2025-12-05 15:16:50.218742628 +0000 UTC m=+1088.559805292" watchObservedRunningTime="2025-12-05 15:16:50.2347155 +0000 UTC m=+1088.575778164" Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.508262 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-67a5-account-create-update-m5q8n" Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.617709 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81caf91c-1e3f-48ee-b953-85b6cb26c922-operator-scripts\") pod \"81caf91c-1e3f-48ee-b953-85b6cb26c922\" (UID: \"81caf91c-1e3f-48ee-b953-85b6cb26c922\") " Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.617883 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rz7jf\" (UniqueName: \"kubernetes.io/projected/81caf91c-1e3f-48ee-b953-85b6cb26c922-kube-api-access-rz7jf\") pod \"81caf91c-1e3f-48ee-b953-85b6cb26c922\" (UID: \"81caf91c-1e3f-48ee-b953-85b6cb26c922\") " Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.618897 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81caf91c-1e3f-48ee-b953-85b6cb26c922-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "81caf91c-1e3f-48ee-b953-85b6cb26c922" (UID: "81caf91c-1e3f-48ee-b953-85b6cb26c922"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.623094 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81caf91c-1e3f-48ee-b953-85b6cb26c922-kube-api-access-rz7jf" (OuterVolumeSpecName: "kube-api-access-rz7jf") pod "81caf91c-1e3f-48ee-b953-85b6cb26c922" (UID: "81caf91c-1e3f-48ee-b953-85b6cb26c922"). InnerVolumeSpecName "kube-api-access-rz7jf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.671988 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-cttvn-config-ckwkr" Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.679757 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-bmclv" Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.721769 4840 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81caf91c-1e3f-48ee-b953-85b6cb26c922-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.721935 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rz7jf\" (UniqueName: \"kubernetes.io/projected/81caf91c-1e3f-48ee-b953-85b6cb26c922-kube-api-access-rz7jf\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.822815 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qm2k9\" (UniqueName: \"kubernetes.io/projected/40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0-kube-api-access-qm2k9\") pod \"40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0\" (UID: \"40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0\") " Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.822892 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0-operator-scripts\") pod \"40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0\" (UID: \"40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0\") " Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.822923 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-scripts\") pod \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\" (UID: \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\") " Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.822958 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-additional-scripts\") pod \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\" (UID: \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\") " Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.823006 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-var-log-ovn\") pod \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\" (UID: \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\") " Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.823027 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdlhf\" (UniqueName: \"kubernetes.io/projected/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-kube-api-access-sdlhf\") pod \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\" (UID: \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\") " Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.824003 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-var-run\") pod \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\" (UID: \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\") " Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.824071 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-var-run-ovn\") pod \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\" (UID: \"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6\") " Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.823067 4840 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6" (UID: "3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.823468 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0" (UID: "40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.823752 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6" (UID: "3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.824033 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-var-run" (OuterVolumeSpecName: "var-run") pod "3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6" (UID: "3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.824029 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-scripts" (OuterVolumeSpecName: "scripts") pod "3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6" (UID: "3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.824191 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6" (UID: "3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.824420 4840 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-var-run\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.824437 4840 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.824446 4840 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.824457 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.824465 4840 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.824473 4840 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.827278 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-kube-api-access-sdlhf" (OuterVolumeSpecName: "kube-api-access-sdlhf") pod "3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6" (UID: "3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6"). InnerVolumeSpecName "kube-api-access-sdlhf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.827554 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0-kube-api-access-qm2k9" (OuterVolumeSpecName: "kube-api-access-qm2k9") pod "40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0" (UID: "40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0"). InnerVolumeSpecName "kube-api-access-qm2k9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.925575 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdlhf\" (UniqueName: \"kubernetes.io/projected/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6-kube-api-access-sdlhf\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:50 crc kubenswrapper[4840]: I1205 15:16:50.925609 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qm2k9\" (UniqueName: \"kubernetes.io/projected/40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0-kube-api-access-qm2k9\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.206997 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-bmclv" event={"ID":"40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0","Type":"ContainerDied","Data":"b795712cc8aada23536ff8e42415bcf1c8449ae1164a3c0e627dde5b69cc0694"} Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.207053 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b795712cc8aada23536ff8e42415bcf1c8449ae1164a3c0e627dde5b69cc0694" Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.207084 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-bmclv" Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.208678 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-67a5-account-create-update-m5q8n" event={"ID":"81caf91c-1e3f-48ee-b953-85b6cb26c922","Type":"ContainerDied","Data":"e5bb073a69d95ecf66af017fb188535971910f10bc27ff079e3ae881073f07e2"} Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.208709 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e5bb073a69d95ecf66af017fb188535971910f10bc27ff079e3ae881073f07e2" Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.208714 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-67a5-account-create-update-m5q8n" Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.210945 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-cttvn-config-ckwkr" event={"ID":"3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6","Type":"ContainerDied","Data":"508169d6db78a40ebb18ad21514fe5c2a3c82d4966b8f596b6b917dfc2aada34"} Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.210991 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="508169d6db78a40ebb18ad21514fe5c2a3c82d4966b8f596b6b917dfc2aada34" Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.211001 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-cttvn-config-ckwkr" Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.410939 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-cttvn" Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.783524 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-cttvn-config-ckwkr"] Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.796607 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-cttvn-config-ckwkr"] Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.820130 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-cttvn-config-2p2jh"] Dec 05 15:16:51 crc kubenswrapper[4840]: E1205 15:16:51.820459 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81caf91c-1e3f-48ee-b953-85b6cb26c922" containerName="mariadb-account-create-update" Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.820474 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="81caf91c-1e3f-48ee-b953-85b6cb26c922" containerName="mariadb-account-create-update" Dec 05 15:16:51 crc kubenswrapper[4840]: E1205 15:16:51.820484 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6" containerName="ovn-config" Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.820491 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6" containerName="ovn-config" Dec 05 15:16:51 crc kubenswrapper[4840]: E1205 15:16:51.820510 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0" containerName="mariadb-database-create" Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.820516 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0" containerName="mariadb-database-create" Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.820684 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0" containerName="mariadb-database-create" Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.820697 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="81caf91c-1e3f-48ee-b953-85b6cb26c922" containerName="mariadb-account-create-update" Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.820706 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6" containerName="ovn-config" Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.821253 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-cttvn-config-2p2jh" Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.824225 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.834532 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-cttvn-config-2p2jh"] Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.941063 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a90da826-65f5-4c07-a00d-5f96fb718ad5-var-run-ovn\") pod \"ovn-controller-cttvn-config-2p2jh\" (UID: \"a90da826-65f5-4c07-a00d-5f96fb718ad5\") " pod="openstack/ovn-controller-cttvn-config-2p2jh" Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.941614 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a90da826-65f5-4c07-a00d-5f96fb718ad5-scripts\") pod \"ovn-controller-cttvn-config-2p2jh\" (UID: \"a90da826-65f5-4c07-a00d-5f96fb718ad5\") " pod="openstack/ovn-controller-cttvn-config-2p2jh" Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.941663 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jgvr\" (UniqueName: \"kubernetes.io/projected/a90da826-65f5-4c07-a00d-5f96fb718ad5-kube-api-access-8jgvr\") pod \"ovn-controller-cttvn-config-2p2jh\" (UID: \"a90da826-65f5-4c07-a00d-5f96fb718ad5\") " pod="openstack/ovn-controller-cttvn-config-2p2jh" Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.941820 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a90da826-65f5-4c07-a00d-5f96fb718ad5-var-run\") pod \"ovn-controller-cttvn-config-2p2jh\" (UID: \"a90da826-65f5-4c07-a00d-5f96fb718ad5\") " pod="openstack/ovn-controller-cttvn-config-2p2jh" Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.942024 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a90da826-65f5-4c07-a00d-5f96fb718ad5-var-log-ovn\") pod \"ovn-controller-cttvn-config-2p2jh\" (UID: \"a90da826-65f5-4c07-a00d-5f96fb718ad5\") " pod="openstack/ovn-controller-cttvn-config-2p2jh" Dec 05 15:16:51 crc kubenswrapper[4840]: I1205 15:16:51.942222 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a90da826-65f5-4c07-a00d-5f96fb718ad5-additional-scripts\") pod \"ovn-controller-cttvn-config-2p2jh\" (UID: \"a90da826-65f5-4c07-a00d-5f96fb718ad5\") " pod="openstack/ovn-controller-cttvn-config-2p2jh" Dec 05 15:16:52 crc kubenswrapper[4840]: I1205 15:16:52.043300 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a90da826-65f5-4c07-a00d-5f96fb718ad5-additional-scripts\") pod \"ovn-controller-cttvn-config-2p2jh\" (UID: \"a90da826-65f5-4c07-a00d-5f96fb718ad5\") " pod="openstack/ovn-controller-cttvn-config-2p2jh" Dec 05 15:16:52 crc kubenswrapper[4840]: I1205 15:16:52.043353 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/a90da826-65f5-4c07-a00d-5f96fb718ad5-var-run-ovn\") pod \"ovn-controller-cttvn-config-2p2jh\" (UID: \"a90da826-65f5-4c07-a00d-5f96fb718ad5\") " pod="openstack/ovn-controller-cttvn-config-2p2jh" Dec 05 15:16:52 crc kubenswrapper[4840]: I1205 15:16:52.043397 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a90da826-65f5-4c07-a00d-5f96fb718ad5-scripts\") pod \"ovn-controller-cttvn-config-2p2jh\" (UID: \"a90da826-65f5-4c07-a00d-5f96fb718ad5\") " pod="openstack/ovn-controller-cttvn-config-2p2jh" Dec 05 15:16:52 crc kubenswrapper[4840]: I1205 15:16:52.043423 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jgvr\" (UniqueName: \"kubernetes.io/projected/a90da826-65f5-4c07-a00d-5f96fb718ad5-kube-api-access-8jgvr\") pod \"ovn-controller-cttvn-config-2p2jh\" (UID: \"a90da826-65f5-4c07-a00d-5f96fb718ad5\") " pod="openstack/ovn-controller-cttvn-config-2p2jh" Dec 05 15:16:52 crc kubenswrapper[4840]: I1205 15:16:52.043483 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a90da826-65f5-4c07-a00d-5f96fb718ad5-var-run\") pod \"ovn-controller-cttvn-config-2p2jh\" (UID: \"a90da826-65f5-4c07-a00d-5f96fb718ad5\") " pod="openstack/ovn-controller-cttvn-config-2p2jh" Dec 05 15:16:52 crc kubenswrapper[4840]: I1205 15:16:52.043538 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a90da826-65f5-4c07-a00d-5f96fb718ad5-var-log-ovn\") pod \"ovn-controller-cttvn-config-2p2jh\" (UID: \"a90da826-65f5-4c07-a00d-5f96fb718ad5\") " pod="openstack/ovn-controller-cttvn-config-2p2jh" Dec 05 15:16:52 crc kubenswrapper[4840]: I1205 15:16:52.043855 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a90da826-65f5-4c07-a00d-5f96fb718ad5-var-log-ovn\") pod \"ovn-controller-cttvn-config-2p2jh\" (UID: \"a90da826-65f5-4c07-a00d-5f96fb718ad5\") " pod="openstack/ovn-controller-cttvn-config-2p2jh" Dec 05 15:16:52 crc kubenswrapper[4840]: I1205 15:16:52.043858 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a90da826-65f5-4c07-a00d-5f96fb718ad5-var-run\") pod \"ovn-controller-cttvn-config-2p2jh\" (UID: \"a90da826-65f5-4c07-a00d-5f96fb718ad5\") " pod="openstack/ovn-controller-cttvn-config-2p2jh" Dec 05 15:16:52 crc kubenswrapper[4840]: I1205 15:16:52.043934 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a90da826-65f5-4c07-a00d-5f96fb718ad5-var-run-ovn\") pod \"ovn-controller-cttvn-config-2p2jh\" (UID: \"a90da826-65f5-4c07-a00d-5f96fb718ad5\") " pod="openstack/ovn-controller-cttvn-config-2p2jh" Dec 05 15:16:52 crc kubenswrapper[4840]: I1205 15:16:52.044316 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a90da826-65f5-4c07-a00d-5f96fb718ad5-additional-scripts\") pod \"ovn-controller-cttvn-config-2p2jh\" (UID: \"a90da826-65f5-4c07-a00d-5f96fb718ad5\") " pod="openstack/ovn-controller-cttvn-config-2p2jh" Dec 05 15:16:52 crc kubenswrapper[4840]: I1205 15:16:52.045745 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/a90da826-65f5-4c07-a00d-5f96fb718ad5-scripts\") pod \"ovn-controller-cttvn-config-2p2jh\" (UID: \"a90da826-65f5-4c07-a00d-5f96fb718ad5\") " pod="openstack/ovn-controller-cttvn-config-2p2jh" Dec 05 15:16:52 crc kubenswrapper[4840]: I1205 15:16:52.064419 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jgvr\" (UniqueName: \"kubernetes.io/projected/a90da826-65f5-4c07-a00d-5f96fb718ad5-kube-api-access-8jgvr\") pod \"ovn-controller-cttvn-config-2p2jh\" (UID: \"a90da826-65f5-4c07-a00d-5f96fb718ad5\") " pod="openstack/ovn-controller-cttvn-config-2p2jh" Dec 05 15:16:52 crc kubenswrapper[4840]: I1205 15:16:52.075881 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6" path="/var/lib/kubelet/pods/3b9e5ff8-9f77-491e-b1c7-9ae2c5c5c5f6/volumes" Dec 05 15:16:52 crc kubenswrapper[4840]: I1205 15:16:52.136923 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-cttvn-config-2p2jh" Dec 05 15:16:52 crc kubenswrapper[4840]: I1205 15:16:52.560393 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-cttvn-config-2p2jh"] Dec 05 15:16:53 crc kubenswrapper[4840]: I1205 15:16:53.233918 4840 generic.go:334] "Generic (PLEG): container finished" podID="a90da826-65f5-4c07-a00d-5f96fb718ad5" containerID="298650201e59ffec2ec45ee76964aaca6c82f53b6d7bfa62b6af15c17019e837" exitCode=0 Dec 05 15:16:53 crc kubenswrapper[4840]: I1205 15:16:53.234019 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-cttvn-config-2p2jh" event={"ID":"a90da826-65f5-4c07-a00d-5f96fb718ad5","Type":"ContainerDied","Data":"298650201e59ffec2ec45ee76964aaca6c82f53b6d7bfa62b6af15c17019e837"} Dec 05 15:16:53 crc kubenswrapper[4840]: I1205 15:16:53.234658 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-cttvn-config-2p2jh" event={"ID":"a90da826-65f5-4c07-a00d-5f96fb718ad5","Type":"ContainerStarted","Data":"8d95ccb941fc8d8502ed00b10ce47cc9264ac0016f8246b2a214e943cfe73aef"} Dec 05 15:16:54 crc kubenswrapper[4840]: I1205 15:16:54.630647 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-cttvn-config-2p2jh" Dec 05 15:16:54 crc kubenswrapper[4840]: I1205 15:16:54.789589 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a90da826-65f5-4c07-a00d-5f96fb718ad5-additional-scripts\") pod \"a90da826-65f5-4c07-a00d-5f96fb718ad5\" (UID: \"a90da826-65f5-4c07-a00d-5f96fb718ad5\") " Dec 05 15:16:54 crc kubenswrapper[4840]: I1205 15:16:54.789648 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a90da826-65f5-4c07-a00d-5f96fb718ad5-var-run-ovn\") pod \"a90da826-65f5-4c07-a00d-5f96fb718ad5\" (UID: \"a90da826-65f5-4c07-a00d-5f96fb718ad5\") " Dec 05 15:16:54 crc kubenswrapper[4840]: I1205 15:16:54.789728 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a90da826-65f5-4c07-a00d-5f96fb718ad5-scripts\") pod \"a90da826-65f5-4c07-a00d-5f96fb718ad5\" (UID: \"a90da826-65f5-4c07-a00d-5f96fb718ad5\") " Dec 05 15:16:54 crc kubenswrapper[4840]: I1205 15:16:54.789749 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jgvr\" (UniqueName: \"kubernetes.io/projected/a90da826-65f5-4c07-a00d-5f96fb718ad5-kube-api-access-8jgvr\") pod \"a90da826-65f5-4c07-a00d-5f96fb718ad5\" (UID: \"a90da826-65f5-4c07-a00d-5f96fb718ad5\") " Dec 05 15:16:54 crc kubenswrapper[4840]: I1205 15:16:54.789799 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a90da826-65f5-4c07-a00d-5f96fb718ad5-var-run\") pod \"a90da826-65f5-4c07-a00d-5f96fb718ad5\" (UID: \"a90da826-65f5-4c07-a00d-5f96fb718ad5\") " Dec 05 15:16:54 crc kubenswrapper[4840]: I1205 15:16:54.789818 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a90da826-65f5-4c07-a00d-5f96fb718ad5-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "a90da826-65f5-4c07-a00d-5f96fb718ad5" (UID: "a90da826-65f5-4c07-a00d-5f96fb718ad5"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:16:54 crc kubenswrapper[4840]: I1205 15:16:54.789847 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a90da826-65f5-4c07-a00d-5f96fb718ad5-var-log-ovn\") pod \"a90da826-65f5-4c07-a00d-5f96fb718ad5\" (UID: \"a90da826-65f5-4c07-a00d-5f96fb718ad5\") " Dec 05 15:16:54 crc kubenswrapper[4840]: I1205 15:16:54.789893 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a90da826-65f5-4c07-a00d-5f96fb718ad5-var-run" (OuterVolumeSpecName: "var-run") pod "a90da826-65f5-4c07-a00d-5f96fb718ad5" (UID: "a90da826-65f5-4c07-a00d-5f96fb718ad5"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:16:54 crc kubenswrapper[4840]: I1205 15:16:54.789989 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a90da826-65f5-4c07-a00d-5f96fb718ad5-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "a90da826-65f5-4c07-a00d-5f96fb718ad5" (UID: "a90da826-65f5-4c07-a00d-5f96fb718ad5"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:16:54 crc kubenswrapper[4840]: I1205 15:16:54.790234 4840 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a90da826-65f5-4c07-a00d-5f96fb718ad5-var-run\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:54 crc kubenswrapper[4840]: I1205 15:16:54.790253 4840 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a90da826-65f5-4c07-a00d-5f96fb718ad5-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:54 crc kubenswrapper[4840]: I1205 15:16:54.790265 4840 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a90da826-65f5-4c07-a00d-5f96fb718ad5-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:54 crc kubenswrapper[4840]: I1205 15:16:54.790533 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a90da826-65f5-4c07-a00d-5f96fb718ad5-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "a90da826-65f5-4c07-a00d-5f96fb718ad5" (UID: "a90da826-65f5-4c07-a00d-5f96fb718ad5"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:16:54 crc kubenswrapper[4840]: I1205 15:16:54.790802 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a90da826-65f5-4c07-a00d-5f96fb718ad5-scripts" (OuterVolumeSpecName: "scripts") pod "a90da826-65f5-4c07-a00d-5f96fb718ad5" (UID: "a90da826-65f5-4c07-a00d-5f96fb718ad5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:16:54 crc kubenswrapper[4840]: I1205 15:16:54.807218 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a90da826-65f5-4c07-a00d-5f96fb718ad5-kube-api-access-8jgvr" (OuterVolumeSpecName: "kube-api-access-8jgvr") pod "a90da826-65f5-4c07-a00d-5f96fb718ad5" (UID: "a90da826-65f5-4c07-a00d-5f96fb718ad5"). InnerVolumeSpecName "kube-api-access-8jgvr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:16:54 crc kubenswrapper[4840]: I1205 15:16:54.891932 4840 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a90da826-65f5-4c07-a00d-5f96fb718ad5-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:54 crc kubenswrapper[4840]: I1205 15:16:54.892205 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a90da826-65f5-4c07-a00d-5f96fb718ad5-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:54 crc kubenswrapper[4840]: I1205 15:16:54.892214 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jgvr\" (UniqueName: \"kubernetes.io/projected/a90da826-65f5-4c07-a00d-5f96fb718ad5-kube-api-access-8jgvr\") on node \"crc\" DevicePath \"\"" Dec 05 15:16:55 crc kubenswrapper[4840]: I1205 15:16:55.252710 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-cttvn-config-2p2jh" event={"ID":"a90da826-65f5-4c07-a00d-5f96fb718ad5","Type":"ContainerDied","Data":"8d95ccb941fc8d8502ed00b10ce47cc9264ac0016f8246b2a214e943cfe73aef"} Dec 05 15:16:55 crc kubenswrapper[4840]: I1205 15:16:55.252745 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d95ccb941fc8d8502ed00b10ce47cc9264ac0016f8246b2a214e943cfe73aef" Dec 05 15:16:55 crc kubenswrapper[4840]: I1205 15:16:55.252773 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-cttvn-config-2p2jh" Dec 05 15:16:55 crc kubenswrapper[4840]: I1205 15:16:55.728081 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-cttvn-config-2p2jh"] Dec 05 15:16:55 crc kubenswrapper[4840]: I1205 15:16:55.736895 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-cttvn-config-2p2jh"] Dec 05 15:16:55 crc kubenswrapper[4840]: I1205 15:16:55.863064 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-cttvn-config-7dg6c"] Dec 05 15:16:55 crc kubenswrapper[4840]: I1205 15:16:55.863096 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/49fa86fd-482b-426d-9ec6-2c963600851e-etc-swift\") pod \"swift-storage-0\" (UID: \"49fa86fd-482b-426d-9ec6-2c963600851e\") " pod="openstack/swift-storage-0" Dec 05 15:16:55 crc kubenswrapper[4840]: E1205 15:16:55.863388 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a90da826-65f5-4c07-a00d-5f96fb718ad5" containerName="ovn-config" Dec 05 15:16:55 crc kubenswrapper[4840]: I1205 15:16:55.863403 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="a90da826-65f5-4c07-a00d-5f96fb718ad5" containerName="ovn-config" Dec 05 15:16:55 crc kubenswrapper[4840]: I1205 15:16:55.863570 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="a90da826-65f5-4c07-a00d-5f96fb718ad5" containerName="ovn-config" Dec 05 15:16:55 crc kubenswrapper[4840]: I1205 15:16:55.864171 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-cttvn-config-7dg6c" Dec 05 15:16:55 crc kubenswrapper[4840]: I1205 15:16:55.867467 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 05 15:16:55 crc kubenswrapper[4840]: I1205 15:16:55.870914 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/49fa86fd-482b-426d-9ec6-2c963600851e-etc-swift\") pod \"swift-storage-0\" (UID: \"49fa86fd-482b-426d-9ec6-2c963600851e\") " pod="openstack/swift-storage-0" Dec 05 15:16:55 crc kubenswrapper[4840]: I1205 15:16:55.930694 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-cttvn-config-7dg6c"] Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.034650 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.066154 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-additional-scripts\") pod \"ovn-controller-cttvn-config-7dg6c\" (UID: \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\") " pod="openstack/ovn-controller-cttvn-config-7dg6c" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.066514 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-var-run\") pod \"ovn-controller-cttvn-config-7dg6c\" (UID: \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\") " pod="openstack/ovn-controller-cttvn-config-7dg6c" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.066559 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-var-run-ovn\") pod \"ovn-controller-cttvn-config-7dg6c\" (UID: \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\") " pod="openstack/ovn-controller-cttvn-config-7dg6c" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.066607 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-scripts\") pod \"ovn-controller-cttvn-config-7dg6c\" (UID: \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\") " pod="openstack/ovn-controller-cttvn-config-7dg6c" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.066676 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-var-log-ovn\") pod \"ovn-controller-cttvn-config-7dg6c\" (UID: \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\") " pod="openstack/ovn-controller-cttvn-config-7dg6c" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.066701 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f54wv\" (UniqueName: \"kubernetes.io/projected/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-kube-api-access-f54wv\") pod \"ovn-controller-cttvn-config-7dg6c\" (UID: \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\") " pod="openstack/ovn-controller-cttvn-config-7dg6c" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.077065 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="a90da826-65f5-4c07-a00d-5f96fb718ad5" path="/var/lib/kubelet/pods/a90da826-65f5-4c07-a00d-5f96fb718ad5/volumes" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.167749 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-additional-scripts\") pod \"ovn-controller-cttvn-config-7dg6c\" (UID: \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\") " pod="openstack/ovn-controller-cttvn-config-7dg6c" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.167816 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-var-run\") pod \"ovn-controller-cttvn-config-7dg6c\" (UID: \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\") " pod="openstack/ovn-controller-cttvn-config-7dg6c" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.167877 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-var-run-ovn\") pod \"ovn-controller-cttvn-config-7dg6c\" (UID: \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\") " pod="openstack/ovn-controller-cttvn-config-7dg6c" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.167931 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-scripts\") pod \"ovn-controller-cttvn-config-7dg6c\" (UID: \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\") " pod="openstack/ovn-controller-cttvn-config-7dg6c" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.167996 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-var-log-ovn\") pod \"ovn-controller-cttvn-config-7dg6c\" (UID: \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\") " pod="openstack/ovn-controller-cttvn-config-7dg6c" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.168020 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f54wv\" (UniqueName: \"kubernetes.io/projected/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-kube-api-access-f54wv\") pod \"ovn-controller-cttvn-config-7dg6c\" (UID: \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\") " pod="openstack/ovn-controller-cttvn-config-7dg6c" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.168373 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-var-run\") pod \"ovn-controller-cttvn-config-7dg6c\" (UID: \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\") " pod="openstack/ovn-controller-cttvn-config-7dg6c" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.168371 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-var-log-ovn\") pod \"ovn-controller-cttvn-config-7dg6c\" (UID: \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\") " pod="openstack/ovn-controller-cttvn-config-7dg6c" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.169029 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-additional-scripts\") pod 
\"ovn-controller-cttvn-config-7dg6c\" (UID: \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\") " pod="openstack/ovn-controller-cttvn-config-7dg6c" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.170293 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-scripts\") pod \"ovn-controller-cttvn-config-7dg6c\" (UID: \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\") " pod="openstack/ovn-controller-cttvn-config-7dg6c" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.170373 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-var-run-ovn\") pod \"ovn-controller-cttvn-config-7dg6c\" (UID: \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\") " pod="openstack/ovn-controller-cttvn-config-7dg6c" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.234340 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f54wv\" (UniqueName: \"kubernetes.io/projected/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-kube-api-access-f54wv\") pod \"ovn-controller-cttvn-config-7dg6c\" (UID: \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\") " pod="openstack/ovn-controller-cttvn-config-7dg6c" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.250571 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-cttvn-config-7dg6c" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.588650 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-nc7h9"] Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.590106 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-nc7h9" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.592962 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.593420 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-j44ls" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.601424 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-nc7h9"] Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.641378 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 05 15:16:56 crc kubenswrapper[4840]: W1205 15:16:56.648593 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49fa86fd_482b_426d_9ec6_2c963600851e.slice/crio-3946ec6495a1e5948eece2d4e76c3f2bb97ebaa1762232c39b0a405f209ce3f4 WatchSource:0}: Error finding container 3946ec6495a1e5948eece2d4e76c3f2bb97ebaa1762232c39b0a405f209ce3f4: Status 404 returned error can't find the container with id 3946ec6495a1e5948eece2d4e76c3f2bb97ebaa1762232c39b0a405f209ce3f4 Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.672638 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cd6xz\" (UniqueName: \"kubernetes.io/projected/e87241c9-95a1-4890-b5c7-0d4a68d1910f-kube-api-access-cd6xz\") pod \"glance-db-sync-nc7h9\" (UID: \"e87241c9-95a1-4890-b5c7-0d4a68d1910f\") " pod="openstack/glance-db-sync-nc7h9" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.672929 4840 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e87241c9-95a1-4890-b5c7-0d4a68d1910f-config-data\") pod \"glance-db-sync-nc7h9\" (UID: \"e87241c9-95a1-4890-b5c7-0d4a68d1910f\") " pod="openstack/glance-db-sync-nc7h9" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.672973 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e87241c9-95a1-4890-b5c7-0d4a68d1910f-db-sync-config-data\") pod \"glance-db-sync-nc7h9\" (UID: \"e87241c9-95a1-4890-b5c7-0d4a68d1910f\") " pod="openstack/glance-db-sync-nc7h9" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.673049 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e87241c9-95a1-4890-b5c7-0d4a68d1910f-combined-ca-bundle\") pod \"glance-db-sync-nc7h9\" (UID: \"e87241c9-95a1-4890-b5c7-0d4a68d1910f\") " pod="openstack/glance-db-sync-nc7h9" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.774058 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cd6xz\" (UniqueName: \"kubernetes.io/projected/e87241c9-95a1-4890-b5c7-0d4a68d1910f-kube-api-access-cd6xz\") pod \"glance-db-sync-nc7h9\" (UID: \"e87241c9-95a1-4890-b5c7-0d4a68d1910f\") " pod="openstack/glance-db-sync-nc7h9" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.774187 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e87241c9-95a1-4890-b5c7-0d4a68d1910f-config-data\") pod \"glance-db-sync-nc7h9\" (UID: \"e87241c9-95a1-4890-b5c7-0d4a68d1910f\") " pod="openstack/glance-db-sync-nc7h9" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.774210 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e87241c9-95a1-4890-b5c7-0d4a68d1910f-db-sync-config-data\") pod \"glance-db-sync-nc7h9\" (UID: \"e87241c9-95a1-4890-b5c7-0d4a68d1910f\") " pod="openstack/glance-db-sync-nc7h9" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.774231 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e87241c9-95a1-4890-b5c7-0d4a68d1910f-combined-ca-bundle\") pod \"glance-db-sync-nc7h9\" (UID: \"e87241c9-95a1-4890-b5c7-0d4a68d1910f\") " pod="openstack/glance-db-sync-nc7h9" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.779507 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e87241c9-95a1-4890-b5c7-0d4a68d1910f-combined-ca-bundle\") pod \"glance-db-sync-nc7h9\" (UID: \"e87241c9-95a1-4890-b5c7-0d4a68d1910f\") " pod="openstack/glance-db-sync-nc7h9" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.779515 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e87241c9-95a1-4890-b5c7-0d4a68d1910f-db-sync-config-data\") pod \"glance-db-sync-nc7h9\" (UID: \"e87241c9-95a1-4890-b5c7-0d4a68d1910f\") " pod="openstack/glance-db-sync-nc7h9" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.779706 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/e87241c9-95a1-4890-b5c7-0d4a68d1910f-config-data\") pod \"glance-db-sync-nc7h9\" (UID: \"e87241c9-95a1-4890-b5c7-0d4a68d1910f\") " pod="openstack/glance-db-sync-nc7h9" Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.789752 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cd6xz\" (UniqueName: \"kubernetes.io/projected/e87241c9-95a1-4890-b5c7-0d4a68d1910f-kube-api-access-cd6xz\") pod \"glance-db-sync-nc7h9\" (UID: \"e87241c9-95a1-4890-b5c7-0d4a68d1910f\") " pod="openstack/glance-db-sync-nc7h9" Dec 05 15:16:56 crc kubenswrapper[4840]: W1205 15:16:56.847840 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8c63923e_5481_4a39_8fa6_9bcd63bf3d15.slice/crio-1210fc54fd8cd5a9c020bf4b7719f57e0ac18ba9dd8f1bc919159f79b3670bb8 WatchSource:0}: Error finding container 1210fc54fd8cd5a9c020bf4b7719f57e0ac18ba9dd8f1bc919159f79b3670bb8: Status 404 returned error can't find the container with id 1210fc54fd8cd5a9c020bf4b7719f57e0ac18ba9dd8f1bc919159f79b3670bb8 Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.850538 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-cttvn-config-7dg6c"] Dec 05 15:16:56 crc kubenswrapper[4840]: I1205 15:16:56.958988 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-nc7h9" Dec 05 15:16:57 crc kubenswrapper[4840]: I1205 15:16:57.271921 4840 generic.go:334] "Generic (PLEG): container finished" podID="f169c577-448f-45db-bcdd-f34f5c24e6bb" containerID="a44027a9b622191feea57e10e032a53de96578f30e052b68af4d233fb01ec896" exitCode=0 Dec 05 15:16:57 crc kubenswrapper[4840]: I1205 15:16:57.272028 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f169c577-448f-45db-bcdd-f34f5c24e6bb","Type":"ContainerDied","Data":"a44027a9b622191feea57e10e032a53de96578f30e052b68af4d233fb01ec896"} Dec 05 15:16:57 crc kubenswrapper[4840]: I1205 15:16:57.273588 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"49fa86fd-482b-426d-9ec6-2c963600851e","Type":"ContainerStarted","Data":"3946ec6495a1e5948eece2d4e76c3f2bb97ebaa1762232c39b0a405f209ce3f4"} Dec 05 15:16:57 crc kubenswrapper[4840]: I1205 15:16:57.277705 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-cttvn-config-7dg6c" event={"ID":"8c63923e-5481-4a39-8fa6-9bcd63bf3d15","Type":"ContainerStarted","Data":"1e74c74674a33aa47a2253145689571d0cc8a4e98b2644024a619ecd93e158c2"} Dec 05 15:16:57 crc kubenswrapper[4840]: I1205 15:16:57.277810 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-cttvn-config-7dg6c" event={"ID":"8c63923e-5481-4a39-8fa6-9bcd63bf3d15","Type":"ContainerStarted","Data":"1210fc54fd8cd5a9c020bf4b7719f57e0ac18ba9dd8f1bc919159f79b3670bb8"} Dec 05 15:16:57 crc kubenswrapper[4840]: I1205 15:16:57.321544 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-cttvn-config-7dg6c" podStartSLOduration=2.321527822 podStartE2EDuration="2.321527822s" podCreationTimestamp="2025-12-05 15:16:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:16:57.320946395 +0000 UTC m=+1095.662009009" watchObservedRunningTime="2025-12-05 15:16:57.321527822 +0000 UTC m=+1095.662590436" Dec 05 15:16:57 
crc kubenswrapper[4840]: I1205 15:16:57.575022 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-nc7h9"] Dec 05 15:16:57 crc kubenswrapper[4840]: W1205 15:16:57.592784 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode87241c9_95a1_4890_b5c7_0d4a68d1910f.slice/crio-80b39b686cc621490324344cf538beb6e1cd872e4ef0cb7ab1d7d5e2a11deb06 WatchSource:0}: Error finding container 80b39b686cc621490324344cf538beb6e1cd872e4ef0cb7ab1d7d5e2a11deb06: Status 404 returned error can't find the container with id 80b39b686cc621490324344cf538beb6e1cd872e4ef0cb7ab1d7d5e2a11deb06 Dec 05 15:16:58 crc kubenswrapper[4840]: I1205 15:16:58.291791 4840 generic.go:334] "Generic (PLEG): container finished" podID="8c63923e-5481-4a39-8fa6-9bcd63bf3d15" containerID="1e74c74674a33aa47a2253145689571d0cc8a4e98b2644024a619ecd93e158c2" exitCode=0 Dec 05 15:16:58 crc kubenswrapper[4840]: I1205 15:16:58.292002 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-cttvn-config-7dg6c" event={"ID":"8c63923e-5481-4a39-8fa6-9bcd63bf3d15","Type":"ContainerDied","Data":"1e74c74674a33aa47a2253145689571d0cc8a4e98b2644024a619ecd93e158c2"} Dec 05 15:16:58 crc kubenswrapper[4840]: I1205 15:16:58.294577 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-nc7h9" event={"ID":"e87241c9-95a1-4890-b5c7-0d4a68d1910f","Type":"ContainerStarted","Data":"80b39b686cc621490324344cf538beb6e1cd872e4ef0cb7ab1d7d5e2a11deb06"} Dec 05 15:16:58 crc kubenswrapper[4840]: I1205 15:16:58.296916 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f169c577-448f-45db-bcdd-f34f5c24e6bb","Type":"ContainerStarted","Data":"d69c4ae1302b8f1f898d03d8e3007d19a59c63e4e1e7dbbcdf22386585f688a8"} Dec 05 15:16:58 crc kubenswrapper[4840]: I1205 15:16:58.297583 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:16:58 crc kubenswrapper[4840]: I1205 15:16:58.299327 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"49fa86fd-482b-426d-9ec6-2c963600851e","Type":"ContainerStarted","Data":"97124c79a6e713741f20aa20a4f381abc4ed8d430a8977276f3794c3e07e398c"} Dec 05 15:16:58 crc kubenswrapper[4840]: I1205 15:16:58.329689 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=73.556974892 podStartE2EDuration="1m23.3296735s" podCreationTimestamp="2025-12-05 15:15:35 +0000 UTC" firstStartedPulling="2025-12-05 15:15:57.628732771 +0000 UTC m=+1035.969795385" lastFinishedPulling="2025-12-05 15:16:07.401431379 +0000 UTC m=+1045.742493993" observedRunningTime="2025-12-05 15:16:58.328453176 +0000 UTC m=+1096.669515790" watchObservedRunningTime="2025-12-05 15:16:58.3296735 +0000 UTC m=+1096.670736114" Dec 05 15:16:59 crc kubenswrapper[4840]: I1205 15:16:59.311478 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"49fa86fd-482b-426d-9ec6-2c963600851e","Type":"ContainerStarted","Data":"9214204f30d2a645357d850213a850c71010a3a3e947ace2b7ea4e31a137cbc0"} Dec 05 15:16:59 crc kubenswrapper[4840]: I1205 15:16:59.311525 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"49fa86fd-482b-426d-9ec6-2c963600851e","Type":"ContainerStarted","Data":"2aafc2bfe4ebcaf476abdccca222e507c6f20cfa53257921666c40433179e4cf"} Dec 05 15:16:59 crc kubenswrapper[4840]: I1205 15:16:59.311539 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"49fa86fd-482b-426d-9ec6-2c963600851e","Type":"ContainerStarted","Data":"fbf81c2025b476be50cb0b92843527b98712091326d736a74b9f7d062ee4fa96"} Dec 05 15:17:00 crc kubenswrapper[4840]: I1205 15:17:00.147028 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-cttvn-config-7dg6c" Dec 05 15:17:00 crc kubenswrapper[4840]: I1205 15:17:00.304354 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-additional-scripts\") pod \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\" (UID: \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\") " Dec 05 15:17:00 crc kubenswrapper[4840]: I1205 15:17:00.304439 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-var-run\") pod \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\" (UID: \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\") " Dec 05 15:17:00 crc kubenswrapper[4840]: I1205 15:17:00.304464 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f54wv\" (UniqueName: \"kubernetes.io/projected/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-kube-api-access-f54wv\") pod \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\" (UID: \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\") " Dec 05 15:17:00 crc kubenswrapper[4840]: I1205 15:17:00.304487 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-scripts\") pod \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\" (UID: \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\") " Dec 05 15:17:00 crc kubenswrapper[4840]: I1205 15:17:00.304557 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-var-log-ovn\") pod \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\" (UID: \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\") " Dec 05 15:17:00 crc kubenswrapper[4840]: I1205 15:17:00.304658 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-var-run-ovn\") pod \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\" (UID: \"8c63923e-5481-4a39-8fa6-9bcd63bf3d15\") " Dec 05 15:17:00 crc kubenswrapper[4840]: I1205 15:17:00.304928 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "8c63923e-5481-4a39-8fa6-9bcd63bf3d15" (UID: "8c63923e-5481-4a39-8fa6-9bcd63bf3d15"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:17:00 crc kubenswrapper[4840]: I1205 15:17:00.304973 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "8c63923e-5481-4a39-8fa6-9bcd63bf3d15" (UID: "8c63923e-5481-4a39-8fa6-9bcd63bf3d15"). 
InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:17:00 crc kubenswrapper[4840]: I1205 15:17:00.304946 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-var-run" (OuterVolumeSpecName: "var-run") pod "8c63923e-5481-4a39-8fa6-9bcd63bf3d15" (UID: "8c63923e-5481-4a39-8fa6-9bcd63bf3d15"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:17:00 crc kubenswrapper[4840]: I1205 15:17:00.305228 4840 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:00 crc kubenswrapper[4840]: I1205 15:17:00.305245 4840 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-var-run\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:00 crc kubenswrapper[4840]: I1205 15:17:00.305256 4840 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:00 crc kubenswrapper[4840]: I1205 15:17:00.305645 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "8c63923e-5481-4a39-8fa6-9bcd63bf3d15" (UID: "8c63923e-5481-4a39-8fa6-9bcd63bf3d15"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:17:00 crc kubenswrapper[4840]: I1205 15:17:00.305930 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-scripts" (OuterVolumeSpecName: "scripts") pod "8c63923e-5481-4a39-8fa6-9bcd63bf3d15" (UID: "8c63923e-5481-4a39-8fa6-9bcd63bf3d15"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:17:00 crc kubenswrapper[4840]: I1205 15:17:00.314547 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-kube-api-access-f54wv" (OuterVolumeSpecName: "kube-api-access-f54wv") pod "8c63923e-5481-4a39-8fa6-9bcd63bf3d15" (UID: "8c63923e-5481-4a39-8fa6-9bcd63bf3d15"). InnerVolumeSpecName "kube-api-access-f54wv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:17:00 crc kubenswrapper[4840]: I1205 15:17:00.321303 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-cttvn-config-7dg6c" event={"ID":"8c63923e-5481-4a39-8fa6-9bcd63bf3d15","Type":"ContainerDied","Data":"1210fc54fd8cd5a9c020bf4b7719f57e0ac18ba9dd8f1bc919159f79b3670bb8"} Dec 05 15:17:00 crc kubenswrapper[4840]: I1205 15:17:00.321337 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1210fc54fd8cd5a9c020bf4b7719f57e0ac18ba9dd8f1bc919159f79b3670bb8" Dec 05 15:17:00 crc kubenswrapper[4840]: I1205 15:17:00.321387 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-cttvn-config-7dg6c" Dec 05 15:17:00 crc kubenswrapper[4840]: I1205 15:17:00.409029 4840 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:00 crc kubenswrapper[4840]: I1205 15:17:00.409455 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f54wv\" (UniqueName: \"kubernetes.io/projected/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-kube-api-access-f54wv\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:00 crc kubenswrapper[4840]: I1205 15:17:00.409473 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8c63923e-5481-4a39-8fa6-9bcd63bf3d15-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:01 crc kubenswrapper[4840]: I1205 15:17:01.221059 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-cttvn-config-7dg6c"] Dec 05 15:17:01 crc kubenswrapper[4840]: I1205 15:17:01.227321 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-cttvn-config-7dg6c"] Dec 05 15:17:02 crc kubenswrapper[4840]: I1205 15:17:02.089481 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c63923e-5481-4a39-8fa6-9bcd63bf3d15" path="/var/lib/kubelet/pods/8c63923e-5481-4a39-8fa6-9bcd63bf3d15/volumes" Dec 05 15:17:02 crc kubenswrapper[4840]: I1205 15:17:02.841136 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 05 15:17:03 crc kubenswrapper[4840]: I1205 15:17:03.500924 4840 generic.go:334] "Generic (PLEG): container finished" podID="e8fc49c1-0820-4dcb-9a50-9d3504b768d9" containerID="7d8de606e132e801bf3051d60b8122b938e918f5ababd4e11fc0130a6cfaf2ca" exitCode=0 Dec 05 15:17:03 crc kubenswrapper[4840]: I1205 15:17:03.500961 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e8fc49c1-0820-4dcb-9a50-9d3504b768d9","Type":"ContainerDied","Data":"7d8de606e132e801bf3051d60b8122b938e918f5ababd4e11fc0130a6cfaf2ca"} Dec 05 15:17:07 crc kubenswrapper[4840]: I1205 15:17:07.448696 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="f169c577-448f-45db-bcdd-f34f5c24e6bb" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.100:5671: connect: connection refused" Dec 05 15:17:11 crc kubenswrapper[4840]: E1205 15:17:11.601741 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified" Dec 05 15:17:11 crc kubenswrapper[4840]: E1205 15:17:11.602161 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cd6xz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-nc7h9_openstack(e87241c9-95a1-4890-b5c7-0d4a68d1910f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:17:11 crc kubenswrapper[4840]: E1205 15:17:11.603540 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-nc7h9" podUID="e87241c9-95a1-4890-b5c7-0d4a68d1910f" Dec 05 15:17:12 crc kubenswrapper[4840]: I1205 15:17:12.051269 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e8fc49c1-0820-4dcb-9a50-9d3504b768d9","Type":"ContainerStarted","Data":"2c611d00f0b25d3869e1beb7eb13a1b7e50608dc6276ce0868249808e9d71ed1"} Dec 05 15:17:12 crc kubenswrapper[4840]: I1205 15:17:12.051998 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 05 15:17:12 crc kubenswrapper[4840]: E1205 15:17:12.052083 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-nc7h9" podUID="e87241c9-95a1-4890-b5c7-0d4a68d1910f" Dec 05 15:17:12 crc kubenswrapper[4840]: I1205 15:17:12.102896 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=85.373888177 podStartE2EDuration="1m37.102873763s" podCreationTimestamp="2025-12-05 15:15:35 +0000 
UTC" firstStartedPulling="2025-12-05 15:15:55.959322227 +0000 UTC m=+1034.300384851" lastFinishedPulling="2025-12-05 15:16:07.688307833 +0000 UTC m=+1046.029370437" observedRunningTime="2025-12-05 15:17:12.097794229 +0000 UTC m=+1110.438856843" watchObservedRunningTime="2025-12-05 15:17:12.102873763 +0000 UTC m=+1110.443936377" Dec 05 15:17:13 crc kubenswrapper[4840]: I1205 15:17:13.064073 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"49fa86fd-482b-426d-9ec6-2c963600851e","Type":"ContainerStarted","Data":"1e431e01d4e116ae7c9fb5a562adbca944bccc6dd09805a567fc3db3db87cf02"} Dec 05 15:17:13 crc kubenswrapper[4840]: I1205 15:17:13.064751 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"49fa86fd-482b-426d-9ec6-2c963600851e","Type":"ContainerStarted","Data":"485b5b15c5d741fdd1d90ee919bc37df9ae17a2fa6f4f76f8f72f955492ada25"} Dec 05 15:17:13 crc kubenswrapper[4840]: I1205 15:17:13.064806 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"49fa86fd-482b-426d-9ec6-2c963600851e","Type":"ContainerStarted","Data":"7ab6d75669c10d27d2a94fe03ef24998d66ea3f1e749b52e76cdd33ab63a58fa"} Dec 05 15:17:13 crc kubenswrapper[4840]: I1205 15:17:13.064822 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"49fa86fd-482b-426d-9ec6-2c963600851e","Type":"ContainerStarted","Data":"f3d33084b5dccb6656dd9586fcf966c3e3a58eda488b29f8633edd98200f1a59"} Dec 05 15:17:15 crc kubenswrapper[4840]: I1205 15:17:15.088306 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"49fa86fd-482b-426d-9ec6-2c963600851e","Type":"ContainerStarted","Data":"d007ce4a048ec660a70a9baca599f187f7c91d53456259a9b3e7ae484a2e9e3c"} Dec 05 15:17:15 crc kubenswrapper[4840]: I1205 15:17:15.088907 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"49fa86fd-482b-426d-9ec6-2c963600851e","Type":"ContainerStarted","Data":"caf255102f818d7f9587aaf12fa59e6d0ce3ad537d31b5246e9ae0e0e5aca7f3"} Dec 05 15:17:15 crc kubenswrapper[4840]: I1205 15:17:15.088926 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"49fa86fd-482b-426d-9ec6-2c963600851e","Type":"ContainerStarted","Data":"f4ccd7145748498cc7c93c11c715303d86c522c4d5f4c31ce7cc2fced0ff654d"} Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.102364 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"49fa86fd-482b-426d-9ec6-2c963600851e","Type":"ContainerStarted","Data":"8991fbdf6b9649d295ade0632e6b91785414bcb529cf2cf6df90fbc6d30f1a51"} Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.102410 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"49fa86fd-482b-426d-9ec6-2c963600851e","Type":"ContainerStarted","Data":"1a152c071106b92baa576a56e4e953d44357f86c6c370970f7130490b864375e"} Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.102419 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"49fa86fd-482b-426d-9ec6-2c963600851e","Type":"ContainerStarted","Data":"17c0ffd336321471896400a1b177617726be96071e3f29e1dc106f40d225a2d6"} Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.102428 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"49fa86fd-482b-426d-9ec6-2c963600851e","Type":"ContainerStarted","Data":"f545a5b5c4b4fdda329d8b2d80569f674856a91d3b06332a3c9c453eeb4f9f96"} Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.137045 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=36.303198191999996 podStartE2EDuration="54.13702638s" podCreationTimestamp="2025-12-05 15:16:22 +0000 UTC" firstStartedPulling="2025-12-05 15:16:56.652417134 +0000 UTC m=+1094.993479748" lastFinishedPulling="2025-12-05 15:17:14.486245322 +0000 UTC m=+1112.827307936" observedRunningTime="2025-12-05 15:17:16.12888084 +0000 UTC m=+1114.469943494" watchObservedRunningTime="2025-12-05 15:17:16.13702638 +0000 UTC m=+1114.478088994" Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.407052 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-5n2bl"] Dec 05 15:17:16 crc kubenswrapper[4840]: E1205 15:17:16.407546 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c63923e-5481-4a39-8fa6-9bcd63bf3d15" containerName="ovn-config" Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.407559 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c63923e-5481-4a39-8fa6-9bcd63bf3d15" containerName="ovn-config" Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.407897 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c63923e-5481-4a39-8fa6-9bcd63bf3d15" containerName="ovn-config" Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.408943 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.412041 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.424332 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-5n2bl"] Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.595463 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnbqz\" (UniqueName: \"kubernetes.io/projected/9ead46a4-d789-4a22-9332-ed2c4f706010-kube-api-access-wnbqz\") pod \"dnsmasq-dns-6d5b6d6b67-5n2bl\" (UID: \"9ead46a4-d789-4a22-9332-ed2c4f706010\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.595821 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5b6d6b67-5n2bl\" (UID: \"9ead46a4-d789-4a22-9332-ed2c4f706010\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.595903 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-ovsdbserver-sb\") pod \"dnsmasq-dns-6d5b6d6b67-5n2bl\" (UID: \"9ead46a4-d789-4a22-9332-ed2c4f706010\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.595930 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-dns-swift-storage-0\") 
pod \"dnsmasq-dns-6d5b6d6b67-5n2bl\" (UID: \"9ead46a4-d789-4a22-9332-ed2c4f706010\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.595958 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-config\") pod \"dnsmasq-dns-6d5b6d6b67-5n2bl\" (UID: \"9ead46a4-d789-4a22-9332-ed2c4f706010\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.595989 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-dns-svc\") pod \"dnsmasq-dns-6d5b6d6b67-5n2bl\" (UID: \"9ead46a4-d789-4a22-9332-ed2c4f706010\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.697148 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-config\") pod \"dnsmasq-dns-6d5b6d6b67-5n2bl\" (UID: \"9ead46a4-d789-4a22-9332-ed2c4f706010\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.697249 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-dns-svc\") pod \"dnsmasq-dns-6d5b6d6b67-5n2bl\" (UID: \"9ead46a4-d789-4a22-9332-ed2c4f706010\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.697307 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnbqz\" (UniqueName: \"kubernetes.io/projected/9ead46a4-d789-4a22-9332-ed2c4f706010-kube-api-access-wnbqz\") pod \"dnsmasq-dns-6d5b6d6b67-5n2bl\" (UID: \"9ead46a4-d789-4a22-9332-ed2c4f706010\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.697371 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5b6d6b67-5n2bl\" (UID: \"9ead46a4-d789-4a22-9332-ed2c4f706010\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.697472 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-ovsdbserver-sb\") pod \"dnsmasq-dns-6d5b6d6b67-5n2bl\" (UID: \"9ead46a4-d789-4a22-9332-ed2c4f706010\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.697512 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-dns-swift-storage-0\") pod \"dnsmasq-dns-6d5b6d6b67-5n2bl\" (UID: \"9ead46a4-d789-4a22-9332-ed2c4f706010\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.698236 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-config\") pod \"dnsmasq-dns-6d5b6d6b67-5n2bl\" (UID: 
\"9ead46a4-d789-4a22-9332-ed2c4f706010\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.699551 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-dns-swift-storage-0\") pod \"dnsmasq-dns-6d5b6d6b67-5n2bl\" (UID: \"9ead46a4-d789-4a22-9332-ed2c4f706010\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.699684 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5b6d6b67-5n2bl\" (UID: \"9ead46a4-d789-4a22-9332-ed2c4f706010\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.700015 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-dns-svc\") pod \"dnsmasq-dns-6d5b6d6b67-5n2bl\" (UID: \"9ead46a4-d789-4a22-9332-ed2c4f706010\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.700687 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-ovsdbserver-sb\") pod \"dnsmasq-dns-6d5b6d6b67-5n2bl\" (UID: \"9ead46a4-d789-4a22-9332-ed2c4f706010\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.718317 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnbqz\" (UniqueName: \"kubernetes.io/projected/9ead46a4-d789-4a22-9332-ed2c4f706010-kube-api-access-wnbqz\") pod \"dnsmasq-dns-6d5b6d6b67-5n2bl\" (UID: \"9ead46a4-d789-4a22-9332-ed2c4f706010\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:17:16 crc kubenswrapper[4840]: I1205 15:17:16.735630 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:17:17 crc kubenswrapper[4840]: W1205 15:17:17.169291 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9ead46a4_d789_4a22_9332_ed2c4f706010.slice/crio-ad23933e2087ad98dbb272819deb866d3aa69a2ff7457c9e5e5c4b6983bf276d WatchSource:0}: Error finding container ad23933e2087ad98dbb272819deb866d3aa69a2ff7457c9e5e5c4b6983bf276d: Status 404 returned error can't find the container with id ad23933e2087ad98dbb272819deb866d3aa69a2ff7457c9e5e5c4b6983bf276d Dec 05 15:17:17 crc kubenswrapper[4840]: I1205 15:17:17.187773 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-5n2bl"] Dec 05 15:17:17 crc kubenswrapper[4840]: I1205 15:17:17.449141 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:17:18 crc kubenswrapper[4840]: I1205 15:17:18.120623 4840 generic.go:334] "Generic (PLEG): container finished" podID="9ead46a4-d789-4a22-9332-ed2c4f706010" containerID="fa67b30a2a132c2e14a6d351bd18491f383d29afd2d680144173f660fd021649" exitCode=0 Dec 05 15:17:18 crc kubenswrapper[4840]: I1205 15:17:18.120679 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" event={"ID":"9ead46a4-d789-4a22-9332-ed2c4f706010","Type":"ContainerDied","Data":"fa67b30a2a132c2e14a6d351bd18491f383d29afd2d680144173f660fd021649"} Dec 05 15:17:18 crc kubenswrapper[4840]: I1205 15:17:18.121039 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" event={"ID":"9ead46a4-d789-4a22-9332-ed2c4f706010","Type":"ContainerStarted","Data":"ad23933e2087ad98dbb272819deb866d3aa69a2ff7457c9e5e5c4b6983bf276d"} Dec 05 15:17:19 crc kubenswrapper[4840]: I1205 15:17:19.129115 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" event={"ID":"9ead46a4-d789-4a22-9332-ed2c4f706010","Type":"ContainerStarted","Data":"1ce06db264cce1f0192fc6f568105da48de1b8a6728582327e6c7e9d6134f2be"} Dec 05 15:17:19 crc kubenswrapper[4840]: I1205 15:17:19.129551 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:17:19 crc kubenswrapper[4840]: I1205 15:17:19.148940 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" podStartSLOduration=3.148921085 podStartE2EDuration="3.148921085s" podCreationTimestamp="2025-12-05 15:17:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:17:19.144600332 +0000 UTC m=+1117.485662936" watchObservedRunningTime="2025-12-05 15:17:19.148921085 +0000 UTC m=+1117.489983689" Dec 05 15:17:19 crc kubenswrapper[4840]: I1205 15:17:19.472457 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:17:19 crc kubenswrapper[4840]: I1205 15:17:19.472533 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:17:26 crc kubenswrapper[4840]: I1205 15:17:26.234379 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-nc7h9" event={"ID":"e87241c9-95a1-4890-b5c7-0d4a68d1910f","Type":"ContainerStarted","Data":"eae57e6c8c36c1fcb90f1e64dc9c58f1b0f35014599ff0c1c353296ffbe56b77"} Dec 05 15:17:26 crc kubenswrapper[4840]: I1205 15:17:26.250832 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-nc7h9" podStartSLOduration=2.339500345 podStartE2EDuration="30.250813727s" podCreationTimestamp="2025-12-05 15:16:56 +0000 UTC" firstStartedPulling="2025-12-05 15:16:57.59513623 +0000 UTC m=+1095.936198834" lastFinishedPulling="2025-12-05 15:17:25.506449572 +0000 UTC m=+1123.847512216" observedRunningTime="2025-12-05 15:17:26.249191551 +0000 UTC m=+1124.590254165" watchObservedRunningTime="2025-12-05 15:17:26.250813727 +0000 UTC m=+1124.591876341" Dec 05 15:17:26 crc kubenswrapper[4840]: I1205 15:17:26.738117 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:17:26 crc kubenswrapper[4840]: I1205 15:17:26.808711 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-ccff5"] Dec 05 15:17:26 crc kubenswrapper[4840]: I1205 15:17:26.809011 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8fbc5445-ccff5" podUID="376013cc-6ae2-4f36-adf2-8aa481b4789f" containerName="dnsmasq-dns" containerID="cri-o://0a3e6f3200f151a74a5f6dedf9b48a7b0f81d62ca7d8f4a6165dbfa27d38c4a7" gracePeriod=10 Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.241884 4840 generic.go:334] "Generic (PLEG): container finished" podID="376013cc-6ae2-4f36-adf2-8aa481b4789f" containerID="0a3e6f3200f151a74a5f6dedf9b48a7b0f81d62ca7d8f4a6165dbfa27d38c4a7" exitCode=0 Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.242192 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-ccff5" event={"ID":"376013cc-6ae2-4f36-adf2-8aa481b4789f","Type":"ContainerDied","Data":"0a3e6f3200f151a74a5f6dedf9b48a7b0f81d62ca7d8f4a6165dbfa27d38c4a7"} Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.242216 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-ccff5" event={"ID":"376013cc-6ae2-4f36-adf2-8aa481b4789f","Type":"ContainerDied","Data":"458cc426991ee34b2b94c65835b8765f45bbe1d85aa8560b3d4153f99c739e69"} Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.242227 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="458cc426991ee34b2b94c65835b8765f45bbe1d85aa8560b3d4153f99c739e69" Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.250897 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-ccff5" Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.386038 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.433243 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w5vwv\" (UniqueName: \"kubernetes.io/projected/376013cc-6ae2-4f36-adf2-8aa481b4789f-kube-api-access-w5vwv\") pod \"376013cc-6ae2-4f36-adf2-8aa481b4789f\" (UID: \"376013cc-6ae2-4f36-adf2-8aa481b4789f\") " Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.433300 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/376013cc-6ae2-4f36-adf2-8aa481b4789f-dns-svc\") pod \"376013cc-6ae2-4f36-adf2-8aa481b4789f\" (UID: \"376013cc-6ae2-4f36-adf2-8aa481b4789f\") " Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.433325 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/376013cc-6ae2-4f36-adf2-8aa481b4789f-config\") pod \"376013cc-6ae2-4f36-adf2-8aa481b4789f\" (UID: \"376013cc-6ae2-4f36-adf2-8aa481b4789f\") " Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.433415 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/376013cc-6ae2-4f36-adf2-8aa481b4789f-ovsdbserver-sb\") pod \"376013cc-6ae2-4f36-adf2-8aa481b4789f\" (UID: \"376013cc-6ae2-4f36-adf2-8aa481b4789f\") " Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.433500 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/376013cc-6ae2-4f36-adf2-8aa481b4789f-ovsdbserver-nb\") pod \"376013cc-6ae2-4f36-adf2-8aa481b4789f\" (UID: \"376013cc-6ae2-4f36-adf2-8aa481b4789f\") " Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.445844 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/376013cc-6ae2-4f36-adf2-8aa481b4789f-kube-api-access-w5vwv" (OuterVolumeSpecName: "kube-api-access-w5vwv") pod "376013cc-6ae2-4f36-adf2-8aa481b4789f" (UID: "376013cc-6ae2-4f36-adf2-8aa481b4789f"). InnerVolumeSpecName "kube-api-access-w5vwv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.501689 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/376013cc-6ae2-4f36-adf2-8aa481b4789f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "376013cc-6ae2-4f36-adf2-8aa481b4789f" (UID: "376013cc-6ae2-4f36-adf2-8aa481b4789f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.504046 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/376013cc-6ae2-4f36-adf2-8aa481b4789f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "376013cc-6ae2-4f36-adf2-8aa481b4789f" (UID: "376013cc-6ae2-4f36-adf2-8aa481b4789f"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.529403 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/376013cc-6ae2-4f36-adf2-8aa481b4789f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "376013cc-6ae2-4f36-adf2-8aa481b4789f" (UID: "376013cc-6ae2-4f36-adf2-8aa481b4789f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.535378 4840 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/376013cc-6ae2-4f36-adf2-8aa481b4789f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.535408 4840 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/376013cc-6ae2-4f36-adf2-8aa481b4789f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.535419 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w5vwv\" (UniqueName: \"kubernetes.io/projected/376013cc-6ae2-4f36-adf2-8aa481b4789f-kube-api-access-w5vwv\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.535431 4840 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/376013cc-6ae2-4f36-adf2-8aa481b4789f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.538090 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/376013cc-6ae2-4f36-adf2-8aa481b4789f-config" (OuterVolumeSpecName: "config") pod "376013cc-6ae2-4f36-adf2-8aa481b4789f" (UID: "376013cc-6ae2-4f36-adf2-8aa481b4789f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.639592 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/376013cc-6ae2-4f36-adf2-8aa481b4789f-config\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.769048 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-e3a9-account-create-update-fv9lq"] Dec 05 15:17:27 crc kubenswrapper[4840]: E1205 15:17:27.770197 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="376013cc-6ae2-4f36-adf2-8aa481b4789f" containerName="init" Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.770297 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="376013cc-6ae2-4f36-adf2-8aa481b4789f" containerName="init" Dec 05 15:17:27 crc kubenswrapper[4840]: E1205 15:17:27.770385 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="376013cc-6ae2-4f36-adf2-8aa481b4789f" containerName="dnsmasq-dns" Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.770459 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="376013cc-6ae2-4f36-adf2-8aa481b4789f" containerName="dnsmasq-dns" Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.770815 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="376013cc-6ae2-4f36-adf2-8aa481b4789f" containerName="dnsmasq-dns" Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.771896 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-e3a9-account-create-update-fv9lq" Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.775012 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-nvknm"] Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.778576 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-nvknm" Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.778775 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.795173 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-e3a9-account-create-update-fv9lq"] Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.878935 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-nvknm"] Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.907748 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-knjqv"] Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.909171 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-knjqv" Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.924488 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-knjqv"] Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.949056 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e1c6cb4f-2b81-41ef-8d84-a207325aa9b8-operator-scripts\") pod \"cinder-e3a9-account-create-update-fv9lq\" (UID: \"e1c6cb4f-2b81-41ef-8d84-a207325aa9b8\") " pod="openstack/cinder-e3a9-account-create-update-fv9lq" Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.949815 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1360652-5788-4fce-9395-312b4e57b7f3-operator-scripts\") pod \"cinder-db-create-nvknm\" (UID: \"c1360652-5788-4fce-9395-312b4e57b7f3\") " pod="openstack/cinder-db-create-nvknm" Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.952034 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlg42\" (UniqueName: \"kubernetes.io/projected/c1360652-5788-4fce-9395-312b4e57b7f3-kube-api-access-wlg42\") pod \"cinder-db-create-nvknm\" (UID: \"c1360652-5788-4fce-9395-312b4e57b7f3\") " pod="openstack/cinder-db-create-nvknm" Dec 05 15:17:27 crc kubenswrapper[4840]: I1205 15:17:27.952142 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjx57\" (UniqueName: \"kubernetes.io/projected/e1c6cb4f-2b81-41ef-8d84-a207325aa9b8-kube-api-access-tjx57\") pod \"cinder-e3a9-account-create-update-fv9lq\" (UID: \"e1c6cb4f-2b81-41ef-8d84-a207325aa9b8\") " pod="openstack/cinder-e3a9-account-create-update-fv9lq" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.053530 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-264fn\" (UniqueName: \"kubernetes.io/projected/8b380884-f228-426c-bf7c-ab261af14b51-kube-api-access-264fn\") pod \"barbican-db-create-knjqv\" (UID: \"8b380884-f228-426c-bf7c-ab261af14b51\") " pod="openstack/barbican-db-create-knjqv" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 
15:17:28.053595 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b380884-f228-426c-bf7c-ab261af14b51-operator-scripts\") pod \"barbican-db-create-knjqv\" (UID: \"8b380884-f228-426c-bf7c-ab261af14b51\") " pod="openstack/barbican-db-create-knjqv" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.053671 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlg42\" (UniqueName: \"kubernetes.io/projected/c1360652-5788-4fce-9395-312b4e57b7f3-kube-api-access-wlg42\") pod \"cinder-db-create-nvknm\" (UID: \"c1360652-5788-4fce-9395-312b4e57b7f3\") " pod="openstack/cinder-db-create-nvknm" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.053710 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjx57\" (UniqueName: \"kubernetes.io/projected/e1c6cb4f-2b81-41ef-8d84-a207325aa9b8-kube-api-access-tjx57\") pod \"cinder-e3a9-account-create-update-fv9lq\" (UID: \"e1c6cb4f-2b81-41ef-8d84-a207325aa9b8\") " pod="openstack/cinder-e3a9-account-create-update-fv9lq" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.053750 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e1c6cb4f-2b81-41ef-8d84-a207325aa9b8-operator-scripts\") pod \"cinder-e3a9-account-create-update-fv9lq\" (UID: \"e1c6cb4f-2b81-41ef-8d84-a207325aa9b8\") " pod="openstack/cinder-e3a9-account-create-update-fv9lq" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.053807 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1360652-5788-4fce-9395-312b4e57b7f3-operator-scripts\") pod \"cinder-db-create-nvknm\" (UID: \"c1360652-5788-4fce-9395-312b4e57b7f3\") " pod="openstack/cinder-db-create-nvknm" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.054500 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e1c6cb4f-2b81-41ef-8d84-a207325aa9b8-operator-scripts\") pod \"cinder-e3a9-account-create-update-fv9lq\" (UID: \"e1c6cb4f-2b81-41ef-8d84-a207325aa9b8\") " pod="openstack/cinder-e3a9-account-create-update-fv9lq" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.054709 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1360652-5788-4fce-9395-312b4e57b7f3-operator-scripts\") pod \"cinder-db-create-nvknm\" (UID: \"c1360652-5788-4fce-9395-312b4e57b7f3\") " pod="openstack/cinder-db-create-nvknm" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.063949 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-c263-account-create-update-28ql2"] Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.064939 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-c263-account-create-update-28ql2" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.068058 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.078347 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlg42\" (UniqueName: \"kubernetes.io/projected/c1360652-5788-4fce-9395-312b4e57b7f3-kube-api-access-wlg42\") pod \"cinder-db-create-nvknm\" (UID: \"c1360652-5788-4fce-9395-312b4e57b7f3\") " pod="openstack/cinder-db-create-nvknm" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.085374 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-c263-account-create-update-28ql2"] Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.087507 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjx57\" (UniqueName: \"kubernetes.io/projected/e1c6cb4f-2b81-41ef-8d84-a207325aa9b8-kube-api-access-tjx57\") pod \"cinder-e3a9-account-create-update-fv9lq\" (UID: \"e1c6cb4f-2b81-41ef-8d84-a207325aa9b8\") " pod="openstack/cinder-e3a9-account-create-update-fv9lq" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.132767 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-e3a9-account-create-update-fv9lq" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.136149 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-nvknm" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.164379 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-264fn\" (UniqueName: \"kubernetes.io/projected/8b380884-f228-426c-bf7c-ab261af14b51-kube-api-access-264fn\") pod \"barbican-db-create-knjqv\" (UID: \"8b380884-f228-426c-bf7c-ab261af14b51\") " pod="openstack/barbican-db-create-knjqv" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.164442 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b380884-f228-426c-bf7c-ab261af14b51-operator-scripts\") pod \"barbican-db-create-knjqv\" (UID: \"8b380884-f228-426c-bf7c-ab261af14b51\") " pod="openstack/barbican-db-create-knjqv" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.178004 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-k7szk"] Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.182080 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-k7szk" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.188557 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b380884-f228-426c-bf7c-ab261af14b51-operator-scripts\") pod \"barbican-db-create-knjqv\" (UID: \"8b380884-f228-426c-bf7c-ab261af14b51\") " pod="openstack/barbican-db-create-knjqv" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.195046 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.195927 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.196083 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-6bz6r" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.196535 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.203232 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-kjhz7"] Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.204495 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-kjhz7" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.212398 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-k7szk"] Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.240571 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-kjhz7"] Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.254062 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-264fn\" (UniqueName: \"kubernetes.io/projected/8b380884-f228-426c-bf7c-ab261af14b51-kube-api-access-264fn\") pod \"barbican-db-create-knjqv\" (UID: \"8b380884-f228-426c-bf7c-ab261af14b51\") " pod="openstack/barbican-db-create-knjqv" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.263027 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-ccff5" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.266843 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6240b61e-5c9f-4590-9b17-c0faae5242cb-operator-scripts\") pod \"barbican-c263-account-create-update-28ql2\" (UID: \"6240b61e-5c9f-4590-9b17-c0faae5242cb\") " pod="openstack/barbican-c263-account-create-update-28ql2" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.266934 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6994\" (UniqueName: \"kubernetes.io/projected/6240b61e-5c9f-4590-9b17-c0faae5242cb-kube-api-access-d6994\") pod \"barbican-c263-account-create-update-28ql2\" (UID: \"6240b61e-5c9f-4590-9b17-c0faae5242cb\") " pod="openstack/barbican-c263-account-create-update-28ql2" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.272371 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-knjqv" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.272428 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-649b-account-create-update-dj8zx"] Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.273773 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-649b-account-create-update-dj8zx" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.286815 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.290035 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-649b-account-create-update-dj8zx"] Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.337957 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-ccff5"] Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.345306 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-ccff5"] Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.368609 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/914b1f5a-b458-459d-ac76-d06ffd0ef611-config-data\") pod \"keystone-db-sync-k7szk\" (UID: \"914b1f5a-b458-459d-ac76-d06ffd0ef611\") " pod="openstack/keystone-db-sync-k7szk" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.368659 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/914b1f5a-b458-459d-ac76-d06ffd0ef611-combined-ca-bundle\") pod \"keystone-db-sync-k7szk\" (UID: \"914b1f5a-b458-459d-ac76-d06ffd0ef611\") " pod="openstack/keystone-db-sync-k7szk" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.368686 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6240b61e-5c9f-4590-9b17-c0faae5242cb-operator-scripts\") pod \"barbican-c263-account-create-update-28ql2\" (UID: \"6240b61e-5c9f-4590-9b17-c0faae5242cb\") " pod="openstack/barbican-c263-account-create-update-28ql2" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.368710 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6994\" (UniqueName: \"kubernetes.io/projected/6240b61e-5c9f-4590-9b17-c0faae5242cb-kube-api-access-d6994\") pod \"barbican-c263-account-create-update-28ql2\" (UID: \"6240b61e-5c9f-4590-9b17-c0faae5242cb\") " pod="openstack/barbican-c263-account-create-update-28ql2" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.368757 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/700cf304-d742-425e-9daf-f56b05297d38-operator-scripts\") pod \"neutron-649b-account-create-update-dj8zx\" (UID: \"700cf304-d742-425e-9daf-f56b05297d38\") " pod="openstack/neutron-649b-account-create-update-dj8zx" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.368790 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8-operator-scripts\") pod \"neutron-db-create-kjhz7\" (UID: \"6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8\") " 
pod="openstack/neutron-db-create-kjhz7" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.368810 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5vvl\" (UniqueName: \"kubernetes.io/projected/700cf304-d742-425e-9daf-f56b05297d38-kube-api-access-j5vvl\") pod \"neutron-649b-account-create-update-dj8zx\" (UID: \"700cf304-d742-425e-9daf-f56b05297d38\") " pod="openstack/neutron-649b-account-create-update-dj8zx" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.368969 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gx9wb\" (UniqueName: \"kubernetes.io/projected/6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8-kube-api-access-gx9wb\") pod \"neutron-db-create-kjhz7\" (UID: \"6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8\") " pod="openstack/neutron-db-create-kjhz7" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.368990 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75d4q\" (UniqueName: \"kubernetes.io/projected/914b1f5a-b458-459d-ac76-d06ffd0ef611-kube-api-access-75d4q\") pod \"keystone-db-sync-k7szk\" (UID: \"914b1f5a-b458-459d-ac76-d06ffd0ef611\") " pod="openstack/keystone-db-sync-k7szk" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.369635 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6240b61e-5c9f-4590-9b17-c0faae5242cb-operator-scripts\") pod \"barbican-c263-account-create-update-28ql2\" (UID: \"6240b61e-5c9f-4590-9b17-c0faae5242cb\") " pod="openstack/barbican-c263-account-create-update-28ql2" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.387462 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6994\" (UniqueName: \"kubernetes.io/projected/6240b61e-5c9f-4590-9b17-c0faae5242cb-kube-api-access-d6994\") pod \"barbican-c263-account-create-update-28ql2\" (UID: \"6240b61e-5c9f-4590-9b17-c0faae5242cb\") " pod="openstack/barbican-c263-account-create-update-28ql2" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.470491 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gx9wb\" (UniqueName: \"kubernetes.io/projected/6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8-kube-api-access-gx9wb\") pod \"neutron-db-create-kjhz7\" (UID: \"6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8\") " pod="openstack/neutron-db-create-kjhz7" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.470839 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75d4q\" (UniqueName: \"kubernetes.io/projected/914b1f5a-b458-459d-ac76-d06ffd0ef611-kube-api-access-75d4q\") pod \"keystone-db-sync-k7szk\" (UID: \"914b1f5a-b458-459d-ac76-d06ffd0ef611\") " pod="openstack/keystone-db-sync-k7szk" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.470945 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/914b1f5a-b458-459d-ac76-d06ffd0ef611-config-data\") pod \"keystone-db-sync-k7szk\" (UID: \"914b1f5a-b458-459d-ac76-d06ffd0ef611\") " pod="openstack/keystone-db-sync-k7szk" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.470970 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/914b1f5a-b458-459d-ac76-d06ffd0ef611-combined-ca-bundle\") 
pod \"keystone-db-sync-k7szk\" (UID: \"914b1f5a-b458-459d-ac76-d06ffd0ef611\") " pod="openstack/keystone-db-sync-k7szk" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.471059 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/700cf304-d742-425e-9daf-f56b05297d38-operator-scripts\") pod \"neutron-649b-account-create-update-dj8zx\" (UID: \"700cf304-d742-425e-9daf-f56b05297d38\") " pod="openstack/neutron-649b-account-create-update-dj8zx" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.471122 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8-operator-scripts\") pod \"neutron-db-create-kjhz7\" (UID: \"6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8\") " pod="openstack/neutron-db-create-kjhz7" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.471167 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5vvl\" (UniqueName: \"kubernetes.io/projected/700cf304-d742-425e-9daf-f56b05297d38-kube-api-access-j5vvl\") pod \"neutron-649b-account-create-update-dj8zx\" (UID: \"700cf304-d742-425e-9daf-f56b05297d38\") " pod="openstack/neutron-649b-account-create-update-dj8zx" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.472633 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/700cf304-d742-425e-9daf-f56b05297d38-operator-scripts\") pod \"neutron-649b-account-create-update-dj8zx\" (UID: \"700cf304-d742-425e-9daf-f56b05297d38\") " pod="openstack/neutron-649b-account-create-update-dj8zx" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.473498 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8-operator-scripts\") pod \"neutron-db-create-kjhz7\" (UID: \"6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8\") " pod="openstack/neutron-db-create-kjhz7" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.475847 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/914b1f5a-b458-459d-ac76-d06ffd0ef611-combined-ca-bundle\") pod \"keystone-db-sync-k7szk\" (UID: \"914b1f5a-b458-459d-ac76-d06ffd0ef611\") " pod="openstack/keystone-db-sync-k7szk" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.488996 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/914b1f5a-b458-459d-ac76-d06ffd0ef611-config-data\") pod \"keystone-db-sync-k7szk\" (UID: \"914b1f5a-b458-459d-ac76-d06ffd0ef611\") " pod="openstack/keystone-db-sync-k7szk" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.492361 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5vvl\" (UniqueName: \"kubernetes.io/projected/700cf304-d742-425e-9daf-f56b05297d38-kube-api-access-j5vvl\") pod \"neutron-649b-account-create-update-dj8zx\" (UID: \"700cf304-d742-425e-9daf-f56b05297d38\") " pod="openstack/neutron-649b-account-create-update-dj8zx" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.492678 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-c263-account-create-update-28ql2" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.493138 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gx9wb\" (UniqueName: \"kubernetes.io/projected/6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8-kube-api-access-gx9wb\") pod \"neutron-db-create-kjhz7\" (UID: \"6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8\") " pod="openstack/neutron-db-create-kjhz7" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.493527 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75d4q\" (UniqueName: \"kubernetes.io/projected/914b1f5a-b458-459d-ac76-d06ffd0ef611-kube-api-access-75d4q\") pod \"keystone-db-sync-k7szk\" (UID: \"914b1f5a-b458-459d-ac76-d06ffd0ef611\") " pod="openstack/keystone-db-sync-k7szk" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.587753 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-k7szk" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.612367 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-kjhz7" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.619800 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-649b-account-create-update-dj8zx" Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.830730 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-nvknm"] Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.840571 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-e3a9-account-create-update-fv9lq"] Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.851508 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-knjqv"] Dec 05 15:17:28 crc kubenswrapper[4840]: I1205 15:17:28.906755 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-c263-account-create-update-28ql2"] Dec 05 15:17:28 crc kubenswrapper[4840]: W1205 15:17:28.971056 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6240b61e_5c9f_4590_9b17_c0faae5242cb.slice/crio-ce19b6844504f95dbd08a361305603c5c2ba76282f0d12513e9d27e99e6799b7 WatchSource:0}: Error finding container ce19b6844504f95dbd08a361305603c5c2ba76282f0d12513e9d27e99e6799b7: Status 404 returned error can't find the container with id ce19b6844504f95dbd08a361305603c5c2ba76282f0d12513e9d27e99e6799b7 Dec 05 15:17:29 crc kubenswrapper[4840]: I1205 15:17:29.023309 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-k7szk"] Dec 05 15:17:29 crc kubenswrapper[4840]: I1205 15:17:29.059719 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-kjhz7"] Dec 05 15:17:29 crc kubenswrapper[4840]: I1205 15:17:29.131511 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-649b-account-create-update-dj8zx"] Dec 05 15:17:29 crc kubenswrapper[4840]: I1205 15:17:29.277888 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-k7szk" event={"ID":"914b1f5a-b458-459d-ac76-d06ffd0ef611","Type":"ContainerStarted","Data":"52f47094b534b0af5ab502798bade63e9b273455ca666773a364d9f29f072ab2"} Dec 05 15:17:29 crc kubenswrapper[4840]: I1205 15:17:29.281354 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/cinder-db-create-nvknm" event={"ID":"c1360652-5788-4fce-9395-312b4e57b7f3","Type":"ContainerStarted","Data":"0f6e95c495f252fd6c4e5c18e0bb38a420714e46554b9a11c4c449118011f644"} Dec 05 15:17:29 crc kubenswrapper[4840]: I1205 15:17:29.290126 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-c263-account-create-update-28ql2" event={"ID":"6240b61e-5c9f-4590-9b17-c0faae5242cb","Type":"ContainerStarted","Data":"ce19b6844504f95dbd08a361305603c5c2ba76282f0d12513e9d27e99e6799b7"} Dec 05 15:17:29 crc kubenswrapper[4840]: I1205 15:17:29.292744 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-e3a9-account-create-update-fv9lq" event={"ID":"e1c6cb4f-2b81-41ef-8d84-a207325aa9b8","Type":"ContainerStarted","Data":"878c61df4934b3bdaa31d3473faa6c866cd238ba99a2bba9f8c36d6ab0153e9e"} Dec 05 15:17:29 crc kubenswrapper[4840]: I1205 15:17:29.292788 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-e3a9-account-create-update-fv9lq" event={"ID":"e1c6cb4f-2b81-41ef-8d84-a207325aa9b8","Type":"ContainerStarted","Data":"5bdc760485025b19d6d4c81dde3396033c4f3cf2ded0d7faf22a83dc564a9cb4"} Dec 05 15:17:29 crc kubenswrapper[4840]: I1205 15:17:29.295822 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-knjqv" event={"ID":"8b380884-f228-426c-bf7c-ab261af14b51","Type":"ContainerStarted","Data":"1896d9d4507e350b1721962c1ec043b953508d834a2e514f937ff8d42dd8756d"} Dec 05 15:17:29 crc kubenswrapper[4840]: I1205 15:17:29.297292 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-649b-account-create-update-dj8zx" event={"ID":"700cf304-d742-425e-9daf-f56b05297d38","Type":"ContainerStarted","Data":"d7297daa5f9d5b1972500c21110e905d767d44f781dc1127e5828a356b4b372e"} Dec 05 15:17:29 crc kubenswrapper[4840]: I1205 15:17:29.298225 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-kjhz7" event={"ID":"6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8","Type":"ContainerStarted","Data":"c2cb5f5a14efb03596d0c024f81f1928f082e1a0c341d750e274dcc1d740c30d"} Dec 05 15:17:29 crc kubenswrapper[4840]: I1205 15:17:29.316992 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-e3a9-account-create-update-fv9lq" podStartSLOduration=2.316954848 podStartE2EDuration="2.316954848s" podCreationTimestamp="2025-12-05 15:17:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:17:29.309932739 +0000 UTC m=+1127.650995353" watchObservedRunningTime="2025-12-05 15:17:29.316954848 +0000 UTC m=+1127.658017462" Dec 05 15:17:30 crc kubenswrapper[4840]: I1205 15:17:30.078880 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="376013cc-6ae2-4f36-adf2-8aa481b4789f" path="/var/lib/kubelet/pods/376013cc-6ae2-4f36-adf2-8aa481b4789f/volumes" Dec 05 15:17:30 crc kubenswrapper[4840]: I1205 15:17:30.309498 4840 generic.go:334] "Generic (PLEG): container finished" podID="e1c6cb4f-2b81-41ef-8d84-a207325aa9b8" containerID="878c61df4934b3bdaa31d3473faa6c866cd238ba99a2bba9f8c36d6ab0153e9e" exitCode=0 Dec 05 15:17:30 crc kubenswrapper[4840]: I1205 15:17:30.309639 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-e3a9-account-create-update-fv9lq" event={"ID":"e1c6cb4f-2b81-41ef-8d84-a207325aa9b8","Type":"ContainerDied","Data":"878c61df4934b3bdaa31d3473faa6c866cd238ba99a2bba9f8c36d6ab0153e9e"} Dec 05 15:17:30 crc 
kubenswrapper[4840]: I1205 15:17:30.311512 4840 generic.go:334] "Generic (PLEG): container finished" podID="700cf304-d742-425e-9daf-f56b05297d38" containerID="684399bcb7fea862bc4a0711b7c4613ea610382c95138fb3587ae78399ad1ffa" exitCode=0 Dec 05 15:17:30 crc kubenswrapper[4840]: I1205 15:17:30.311615 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-649b-account-create-update-dj8zx" event={"ID":"700cf304-d742-425e-9daf-f56b05297d38","Type":"ContainerDied","Data":"684399bcb7fea862bc4a0711b7c4613ea610382c95138fb3587ae78399ad1ffa"} Dec 05 15:17:30 crc kubenswrapper[4840]: I1205 15:17:30.313270 4840 generic.go:334] "Generic (PLEG): container finished" podID="6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8" containerID="2d3968b6397659a06ad6cae41d1ed9c5f662412c481c100caaf34ff0a72a6929" exitCode=0 Dec 05 15:17:30 crc kubenswrapper[4840]: I1205 15:17:30.313375 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-kjhz7" event={"ID":"6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8","Type":"ContainerDied","Data":"2d3968b6397659a06ad6cae41d1ed9c5f662412c481c100caaf34ff0a72a6929"} Dec 05 15:17:30 crc kubenswrapper[4840]: I1205 15:17:30.317395 4840 generic.go:334] "Generic (PLEG): container finished" podID="c1360652-5788-4fce-9395-312b4e57b7f3" containerID="9af18c55e60b9074cfb6f27216d86ce1312856a93f46d72949a87af7aac3d771" exitCode=0 Dec 05 15:17:30 crc kubenswrapper[4840]: I1205 15:17:30.317497 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-nvknm" event={"ID":"c1360652-5788-4fce-9395-312b4e57b7f3","Type":"ContainerDied","Data":"9af18c55e60b9074cfb6f27216d86ce1312856a93f46d72949a87af7aac3d771"} Dec 05 15:17:30 crc kubenswrapper[4840]: I1205 15:17:30.319132 4840 generic.go:334] "Generic (PLEG): container finished" podID="6240b61e-5c9f-4590-9b17-c0faae5242cb" containerID="2514754c331203c53bd291a5da392f1726ba8e83e03150eeb5d5337a50797a3d" exitCode=0 Dec 05 15:17:30 crc kubenswrapper[4840]: I1205 15:17:30.319458 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-c263-account-create-update-28ql2" event={"ID":"6240b61e-5c9f-4590-9b17-c0faae5242cb","Type":"ContainerDied","Data":"2514754c331203c53bd291a5da392f1726ba8e83e03150eeb5d5337a50797a3d"} Dec 05 15:17:30 crc kubenswrapper[4840]: I1205 15:17:30.323803 4840 generic.go:334] "Generic (PLEG): container finished" podID="8b380884-f228-426c-bf7c-ab261af14b51" containerID="fb5d662d89f08d0c7320fe93d01a371cb676016cedc3b4e984e7fbd6bbd2a1ef" exitCode=0 Dec 05 15:17:30 crc kubenswrapper[4840]: I1205 15:17:30.323902 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-knjqv" event={"ID":"8b380884-f228-426c-bf7c-ab261af14b51","Type":"ContainerDied","Data":"fb5d662d89f08d0c7320fe93d01a371cb676016cedc3b4e984e7fbd6bbd2a1ef"} Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.350784 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-nvknm" event={"ID":"c1360652-5788-4fce-9395-312b4e57b7f3","Type":"ContainerDied","Data":"0f6e95c495f252fd6c4e5c18e0bb38a420714e46554b9a11c4c449118011f644"} Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.351364 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f6e95c495f252fd6c4e5c18e0bb38a420714e46554b9a11c4c449118011f644" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.352637 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-knjqv" 
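Every db-create and account-create-update pod above follows the same one-shot pattern: a PLEG ContainerStarted event, a generic.go:334 "container finished" record carrying the exit code, then ContainerDied. All six job containers here exit with exitCode=0, so a useful check on a log like this is to surface only the non-zero exits; a small sketch (regex built against the generic.go:334 lines above, helper name invented):

    import re

    # Matches the generic.go:334 "container finished" entries above.
    FINISHED_RE = re.compile(
        r'container finished" podID="(?P<pod>[^"]+)" '
        r'containerID="(?P<cid>[0-9a-f]+)" exitCode=(?P<code>-?\d+)'
    )

    def failed_containers(lines):
        """Yield (podID, containerID, exitCode) for non-zero container exits."""
        for line in lines:
            m = FINISHED_RE.search(line)
            if m and int(m.group("code")) != 0:
                yield m.group("pod"), m.group("cid"), int(m.group("code"))

Run over this section, the generator yields nothing, which is exactly the expected outcome for successfully completed database jobs.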
event={"ID":"8b380884-f228-426c-bf7c-ab261af14b51","Type":"ContainerDied","Data":"1896d9d4507e350b1721962c1ec043b953508d834a2e514f937ff8d42dd8756d"} Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.352671 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1896d9d4507e350b1721962c1ec043b953508d834a2e514f937ff8d42dd8756d" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.583806 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-nvknm" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.626477 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-knjqv" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.653589 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-c263-account-create-update-28ql2" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.677524 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-e3a9-account-create-update-fv9lq" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.678853 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-264fn\" (UniqueName: \"kubernetes.io/projected/8b380884-f228-426c-bf7c-ab261af14b51-kube-api-access-264fn\") pod \"8b380884-f228-426c-bf7c-ab261af14b51\" (UID: \"8b380884-f228-426c-bf7c-ab261af14b51\") " Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.678947 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6994\" (UniqueName: \"kubernetes.io/projected/6240b61e-5c9f-4590-9b17-c0faae5242cb-kube-api-access-d6994\") pod \"6240b61e-5c9f-4590-9b17-c0faae5242cb\" (UID: \"6240b61e-5c9f-4590-9b17-c0faae5242cb\") " Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.678977 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wlg42\" (UniqueName: \"kubernetes.io/projected/c1360652-5788-4fce-9395-312b4e57b7f3-kube-api-access-wlg42\") pod \"c1360652-5788-4fce-9395-312b4e57b7f3\" (UID: \"c1360652-5788-4fce-9395-312b4e57b7f3\") " Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.679040 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b380884-f228-426c-bf7c-ab261af14b51-operator-scripts\") pod \"8b380884-f228-426c-bf7c-ab261af14b51\" (UID: \"8b380884-f228-426c-bf7c-ab261af14b51\") " Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.679061 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6240b61e-5c9f-4590-9b17-c0faae5242cb-operator-scripts\") pod \"6240b61e-5c9f-4590-9b17-c0faae5242cb\" (UID: \"6240b61e-5c9f-4590-9b17-c0faae5242cb\") " Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.679114 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1360652-5788-4fce-9395-312b4e57b7f3-operator-scripts\") pod \"c1360652-5788-4fce-9395-312b4e57b7f3\" (UID: \"c1360652-5788-4fce-9395-312b4e57b7f3\") " Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.680345 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1360652-5788-4fce-9395-312b4e57b7f3-operator-scripts" 
(OuterVolumeSpecName: "operator-scripts") pod "c1360652-5788-4fce-9395-312b4e57b7f3" (UID: "c1360652-5788-4fce-9395-312b4e57b7f3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.680797 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b380884-f228-426c-bf7c-ab261af14b51-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8b380884-f228-426c-bf7c-ab261af14b51" (UID: "8b380884-f228-426c-bf7c-ab261af14b51"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.681159 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6240b61e-5c9f-4590-9b17-c0faae5242cb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6240b61e-5c9f-4590-9b17-c0faae5242cb" (UID: "6240b61e-5c9f-4590-9b17-c0faae5242cb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.686215 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6240b61e-5c9f-4590-9b17-c0faae5242cb-kube-api-access-d6994" (OuterVolumeSpecName: "kube-api-access-d6994") pod "6240b61e-5c9f-4590-9b17-c0faae5242cb" (UID: "6240b61e-5c9f-4590-9b17-c0faae5242cb"). InnerVolumeSpecName "kube-api-access-d6994". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.688951 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1360652-5788-4fce-9395-312b4e57b7f3-kube-api-access-wlg42" (OuterVolumeSpecName: "kube-api-access-wlg42") pod "c1360652-5788-4fce-9395-312b4e57b7f3" (UID: "c1360652-5788-4fce-9395-312b4e57b7f3"). InnerVolumeSpecName "kube-api-access-wlg42". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.689323 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b380884-f228-426c-bf7c-ab261af14b51-kube-api-access-264fn" (OuterVolumeSpecName: "kube-api-access-264fn") pod "8b380884-f228-426c-bf7c-ab261af14b51" (UID: "8b380884-f228-426c-bf7c-ab261af14b51"). InnerVolumeSpecName "kube-api-access-264fn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.692091 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-kjhz7" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.780555 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e1c6cb4f-2b81-41ef-8d84-a207325aa9b8-operator-scripts\") pod \"e1c6cb4f-2b81-41ef-8d84-a207325aa9b8\" (UID: \"e1c6cb4f-2b81-41ef-8d84-a207325aa9b8\") " Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.780615 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8-operator-scripts\") pod \"6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8\" (UID: \"6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8\") " Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.780654 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tjx57\" (UniqueName: \"kubernetes.io/projected/e1c6cb4f-2b81-41ef-8d84-a207325aa9b8-kube-api-access-tjx57\") pod \"e1c6cb4f-2b81-41ef-8d84-a207325aa9b8\" (UID: \"e1c6cb4f-2b81-41ef-8d84-a207325aa9b8\") " Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.780763 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gx9wb\" (UniqueName: \"kubernetes.io/projected/6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8-kube-api-access-gx9wb\") pod \"6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8\" (UID: \"6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8\") " Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.781244 4840 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b380884-f228-426c-bf7c-ab261af14b51-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.781263 4840 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6240b61e-5c9f-4590-9b17-c0faae5242cb-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.781273 4840 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1360652-5788-4fce-9395-312b4e57b7f3-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.781283 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-264fn\" (UniqueName: \"kubernetes.io/projected/8b380884-f228-426c-bf7c-ab261af14b51-kube-api-access-264fn\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.781294 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6994\" (UniqueName: \"kubernetes.io/projected/6240b61e-5c9f-4590-9b17-c0faae5242cb-kube-api-access-d6994\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.781303 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wlg42\" (UniqueName: \"kubernetes.io/projected/c1360652-5788-4fce-9395-312b4e57b7f3-kube-api-access-wlg42\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.781333 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1c6cb4f-2b81-41ef-8d84-a207325aa9b8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e1c6cb4f-2b81-41ef-8d84-a207325aa9b8" (UID: "e1c6cb4f-2b81-41ef-8d84-a207325aa9b8"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.781351 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8" (UID: "6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.784398 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8-kube-api-access-gx9wb" (OuterVolumeSpecName: "kube-api-access-gx9wb") pod "6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8" (UID: "6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8"). InnerVolumeSpecName "kube-api-access-gx9wb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.784791 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1c6cb4f-2b81-41ef-8d84-a207325aa9b8-kube-api-access-tjx57" (OuterVolumeSpecName: "kube-api-access-tjx57") pod "e1c6cb4f-2b81-41ef-8d84-a207325aa9b8" (UID: "e1c6cb4f-2b81-41ef-8d84-a207325aa9b8"). InnerVolumeSpecName "kube-api-access-tjx57". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.821705 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-649b-account-create-update-dj8zx" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.882541 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5vvl\" (UniqueName: \"kubernetes.io/projected/700cf304-d742-425e-9daf-f56b05297d38-kube-api-access-j5vvl\") pod \"700cf304-d742-425e-9daf-f56b05297d38\" (UID: \"700cf304-d742-425e-9daf-f56b05297d38\") " Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.882592 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/700cf304-d742-425e-9daf-f56b05297d38-operator-scripts\") pod \"700cf304-d742-425e-9daf-f56b05297d38\" (UID: \"700cf304-d742-425e-9daf-f56b05297d38\") " Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.883169 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/700cf304-d742-425e-9daf-f56b05297d38-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "700cf304-d742-425e-9daf-f56b05297d38" (UID: "700cf304-d742-425e-9daf-f56b05297d38"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.883227 4840 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e1c6cb4f-2b81-41ef-8d84-a207325aa9b8-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.883361 4840 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.883379 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tjx57\" (UniqueName: \"kubernetes.io/projected/e1c6cb4f-2b81-41ef-8d84-a207325aa9b8-kube-api-access-tjx57\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.883397 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gx9wb\" (UniqueName: \"kubernetes.io/projected/6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8-kube-api-access-gx9wb\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.886353 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/700cf304-d742-425e-9daf-f56b05297d38-kube-api-access-j5vvl" (OuterVolumeSpecName: "kube-api-access-j5vvl") pod "700cf304-d742-425e-9daf-f56b05297d38" (UID: "700cf304-d742-425e-9daf-f56b05297d38"). InnerVolumeSpecName "kube-api-access-j5vvl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.984673 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5vvl\" (UniqueName: \"kubernetes.io/projected/700cf304-d742-425e-9daf-f56b05297d38-kube-api-access-j5vvl\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:33 crc kubenswrapper[4840]: I1205 15:17:33.984704 4840 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/700cf304-d742-425e-9daf-f56b05297d38-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:34 crc kubenswrapper[4840]: I1205 15:17:34.371787 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-kjhz7" Dec 05 15:17:34 crc kubenswrapper[4840]: I1205 15:17:34.372731 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-kjhz7" event={"ID":"6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8","Type":"ContainerDied","Data":"c2cb5f5a14efb03596d0c024f81f1928f082e1a0c341d750e274dcc1d740c30d"} Dec 05 15:17:34 crc kubenswrapper[4840]: I1205 15:17:34.372777 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c2cb5f5a14efb03596d0c024f81f1928f082e1a0c341d750e274dcc1d740c30d" Dec 05 15:17:34 crc kubenswrapper[4840]: I1205 15:17:34.375947 4840 generic.go:334] "Generic (PLEG): container finished" podID="e87241c9-95a1-4890-b5c7-0d4a68d1910f" containerID="eae57e6c8c36c1fcb90f1e64dc9c58f1b0f35014599ff0c1c353296ffbe56b77" exitCode=0 Dec 05 15:17:34 crc kubenswrapper[4840]: I1205 15:17:34.376057 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-nc7h9" event={"ID":"e87241c9-95a1-4890-b5c7-0d4a68d1910f","Type":"ContainerDied","Data":"eae57e6c8c36c1fcb90f1e64dc9c58f1b0f35014599ff0c1c353296ffbe56b77"} Dec 05 15:17:34 crc kubenswrapper[4840]: I1205 15:17:34.378460 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-k7szk" event={"ID":"914b1f5a-b458-459d-ac76-d06ffd0ef611","Type":"ContainerStarted","Data":"c1f5be28a867337c667fb0b5f8bbb1b69cf7036f3d77cfe48acedc9847b1e37b"} Dec 05 15:17:34 crc kubenswrapper[4840]: I1205 15:17:34.380521 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-c263-account-create-update-28ql2" event={"ID":"6240b61e-5c9f-4590-9b17-c0faae5242cb","Type":"ContainerDied","Data":"ce19b6844504f95dbd08a361305603c5c2ba76282f0d12513e9d27e99e6799b7"} Dec 05 15:17:34 crc kubenswrapper[4840]: I1205 15:17:34.380555 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-c263-account-create-update-28ql2" Dec 05 15:17:34 crc kubenswrapper[4840]: I1205 15:17:34.380570 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ce19b6844504f95dbd08a361305603c5c2ba76282f0d12513e9d27e99e6799b7" Dec 05 15:17:34 crc kubenswrapper[4840]: I1205 15:17:34.382891 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-e3a9-account-create-update-fv9lq" Dec 05 15:17:34 crc kubenswrapper[4840]: I1205 15:17:34.382927 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-e3a9-account-create-update-fv9lq" event={"ID":"e1c6cb4f-2b81-41ef-8d84-a207325aa9b8","Type":"ContainerDied","Data":"5bdc760485025b19d6d4c81dde3396033c4f3cf2ded0d7faf22a83dc564a9cb4"} Dec 05 15:17:34 crc kubenswrapper[4840]: I1205 15:17:34.382976 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5bdc760485025b19d6d4c81dde3396033c4f3cf2ded0d7faf22a83dc564a9cb4" Dec 05 15:17:34 crc kubenswrapper[4840]: I1205 15:17:34.385048 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-649b-account-create-update-dj8zx" event={"ID":"700cf304-d742-425e-9daf-f56b05297d38","Type":"ContainerDied","Data":"d7297daa5f9d5b1972500c21110e905d767d44f781dc1127e5828a356b4b372e"} Dec 05 15:17:34 crc kubenswrapper[4840]: I1205 15:17:34.385085 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-649b-account-create-update-dj8zx" Dec 05 15:17:34 crc kubenswrapper[4840]: I1205 15:17:34.385139 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-knjqv" Dec 05 15:17:34 crc kubenswrapper[4840]: I1205 15:17:34.385092 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d7297daa5f9d5b1972500c21110e905d767d44f781dc1127e5828a356b4b372e" Dec 05 15:17:34 crc kubenswrapper[4840]: I1205 15:17:34.385234 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-nvknm" Dec 05 15:17:34 crc kubenswrapper[4840]: I1205 15:17:34.451203 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-k7szk" podStartSLOduration=2.145685782 podStartE2EDuration="6.451183472s" podCreationTimestamp="2025-12-05 15:17:28 +0000 UTC" firstStartedPulling="2025-12-05 15:17:29.100680095 +0000 UTC m=+1127.441742709" lastFinishedPulling="2025-12-05 15:17:33.406177785 +0000 UTC m=+1131.747240399" observedRunningTime="2025-12-05 15:17:34.428620274 +0000 UTC m=+1132.769682898" watchObservedRunningTime="2025-12-05 15:17:34.451183472 +0000 UTC m=+1132.792246096" Dec 05 15:17:35 crc kubenswrapper[4840]: I1205 15:17:35.830051 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-nc7h9" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.018089 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e87241c9-95a1-4890-b5c7-0d4a68d1910f-db-sync-config-data\") pod \"e87241c9-95a1-4890-b5c7-0d4a68d1910f\" (UID: \"e87241c9-95a1-4890-b5c7-0d4a68d1910f\") " Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.018245 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e87241c9-95a1-4890-b5c7-0d4a68d1910f-combined-ca-bundle\") pod \"e87241c9-95a1-4890-b5c7-0d4a68d1910f\" (UID: \"e87241c9-95a1-4890-b5c7-0d4a68d1910f\") " Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.018388 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e87241c9-95a1-4890-b5c7-0d4a68d1910f-config-data\") pod \"e87241c9-95a1-4890-b5c7-0d4a68d1910f\" (UID: \"e87241c9-95a1-4890-b5c7-0d4a68d1910f\") " Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.018420 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cd6xz\" (UniqueName: \"kubernetes.io/projected/e87241c9-95a1-4890-b5c7-0d4a68d1910f-kube-api-access-cd6xz\") pod \"e87241c9-95a1-4890-b5c7-0d4a68d1910f\" (UID: \"e87241c9-95a1-4890-b5c7-0d4a68d1910f\") " Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.025088 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e87241c9-95a1-4890-b5c7-0d4a68d1910f-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "e87241c9-95a1-4890-b5c7-0d4a68d1910f" (UID: "e87241c9-95a1-4890-b5c7-0d4a68d1910f"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.027140 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e87241c9-95a1-4890-b5c7-0d4a68d1910f-kube-api-access-cd6xz" (OuterVolumeSpecName: "kube-api-access-cd6xz") pod "e87241c9-95a1-4890-b5c7-0d4a68d1910f" (UID: "e87241c9-95a1-4890-b5c7-0d4a68d1910f"). InnerVolumeSpecName "kube-api-access-cd6xz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.044756 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e87241c9-95a1-4890-b5c7-0d4a68d1910f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e87241c9-95a1-4890-b5c7-0d4a68d1910f" (UID: "e87241c9-95a1-4890-b5c7-0d4a68d1910f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.085938 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e87241c9-95a1-4890-b5c7-0d4a68d1910f-config-data" (OuterVolumeSpecName: "config-data") pod "e87241c9-95a1-4890-b5c7-0d4a68d1910f" (UID: "e87241c9-95a1-4890-b5c7-0d4a68d1910f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.120531 4840 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e87241c9-95a1-4890-b5c7-0d4a68d1910f-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.120572 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e87241c9-95a1-4890-b5c7-0d4a68d1910f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.120584 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e87241c9-95a1-4890-b5c7-0d4a68d1910f-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.120594 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cd6xz\" (UniqueName: \"kubernetes.io/projected/e87241c9-95a1-4890-b5c7-0d4a68d1910f-kube-api-access-cd6xz\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.414014 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-nc7h9" event={"ID":"e87241c9-95a1-4890-b5c7-0d4a68d1910f","Type":"ContainerDied","Data":"80b39b686cc621490324344cf538beb6e1cd872e4ef0cb7ab1d7d5e2a11deb06"} Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.414087 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="80b39b686cc621490324344cf538beb6e1cd872e4ef0cb7ab1d7d5e2a11deb06" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.414958 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-nc7h9" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.841882 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-g8jn9"] Dec 05 15:17:36 crc kubenswrapper[4840]: E1205 15:17:36.842222 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b380884-f228-426c-bf7c-ab261af14b51" containerName="mariadb-database-create" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.842234 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b380884-f228-426c-bf7c-ab261af14b51" containerName="mariadb-database-create" Dec 05 15:17:36 crc kubenswrapper[4840]: E1205 15:17:36.842248 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="700cf304-d742-425e-9daf-f56b05297d38" containerName="mariadb-account-create-update" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.842254 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="700cf304-d742-425e-9daf-f56b05297d38" containerName="mariadb-account-create-update" Dec 05 15:17:36 crc kubenswrapper[4840]: E1205 15:17:36.842268 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1360652-5788-4fce-9395-312b4e57b7f3" containerName="mariadb-database-create" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.842274 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1360652-5788-4fce-9395-312b4e57b7f3" containerName="mariadb-database-create" Dec 05 15:17:36 crc kubenswrapper[4840]: E1205 15:17:36.842296 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8" containerName="mariadb-database-create" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.842302 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8" containerName="mariadb-database-create" Dec 05 15:17:36 crc kubenswrapper[4840]: E1205 15:17:36.842314 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6240b61e-5c9f-4590-9b17-c0faae5242cb" containerName="mariadb-account-create-update" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.842320 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="6240b61e-5c9f-4590-9b17-c0faae5242cb" containerName="mariadb-account-create-update" Dec 05 15:17:36 crc kubenswrapper[4840]: E1205 15:17:36.842329 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e87241c9-95a1-4890-b5c7-0d4a68d1910f" containerName="glance-db-sync" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.842335 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="e87241c9-95a1-4890-b5c7-0d4a68d1910f" containerName="glance-db-sync" Dec 05 15:17:36 crc kubenswrapper[4840]: E1205 15:17:36.842344 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1c6cb4f-2b81-41ef-8d84-a207325aa9b8" containerName="mariadb-account-create-update" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.842350 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1c6cb4f-2b81-41ef-8d84-a207325aa9b8" containerName="mariadb-account-create-update" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.842484 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1c6cb4f-2b81-41ef-8d84-a207325aa9b8" containerName="mariadb-account-create-update" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.842499 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="700cf304-d742-425e-9daf-f56b05297d38" 
containerName="mariadb-account-create-update" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.842509 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8" containerName="mariadb-database-create" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.842519 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b380884-f228-426c-bf7c-ab261af14b51" containerName="mariadb-database-create" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.842533 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="e87241c9-95a1-4890-b5c7-0d4a68d1910f" containerName="glance-db-sync" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.842547 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1360652-5788-4fce-9395-312b4e57b7f3" containerName="mariadb-database-create" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.842557 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="6240b61e-5c9f-4590-9b17-c0faae5242cb" containerName="mariadb-account-create-update" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.843387 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" Dec 05 15:17:36 crc kubenswrapper[4840]: I1205 15:17:36.869556 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-g8jn9"] Dec 05 15:17:37 crc kubenswrapper[4840]: I1205 15:17:37.035609 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-ovsdbserver-nb\") pod \"dnsmasq-dns-895cf5cf-g8jn9\" (UID: \"7e6f2acf-f874-4abd-8343-0376ca5052df\") " pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" Dec 05 15:17:37 crc kubenswrapper[4840]: I1205 15:17:37.035782 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7cfw\" (UniqueName: \"kubernetes.io/projected/7e6f2acf-f874-4abd-8343-0376ca5052df-kube-api-access-h7cfw\") pod \"dnsmasq-dns-895cf5cf-g8jn9\" (UID: \"7e6f2acf-f874-4abd-8343-0376ca5052df\") " pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" Dec 05 15:17:37 crc kubenswrapper[4840]: I1205 15:17:37.035839 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-dns-swift-storage-0\") pod \"dnsmasq-dns-895cf5cf-g8jn9\" (UID: \"7e6f2acf-f874-4abd-8343-0376ca5052df\") " pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" Dec 05 15:17:37 crc kubenswrapper[4840]: I1205 15:17:37.035860 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-dns-svc\") pod \"dnsmasq-dns-895cf5cf-g8jn9\" (UID: \"7e6f2acf-f874-4abd-8343-0376ca5052df\") " pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" Dec 05 15:17:37 crc kubenswrapper[4840]: I1205 15:17:37.035942 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-config\") pod \"dnsmasq-dns-895cf5cf-g8jn9\" (UID: \"7e6f2acf-f874-4abd-8343-0376ca5052df\") " pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" Dec 05 15:17:37 crc kubenswrapper[4840]: I1205 15:17:37.035958 4840 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-ovsdbserver-sb\") pod \"dnsmasq-dns-895cf5cf-g8jn9\" (UID: \"7e6f2acf-f874-4abd-8343-0376ca5052df\") " pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" Dec 05 15:17:37 crc kubenswrapper[4840]: I1205 15:17:37.137280 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-dns-swift-storage-0\") pod \"dnsmasq-dns-895cf5cf-g8jn9\" (UID: \"7e6f2acf-f874-4abd-8343-0376ca5052df\") " pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" Dec 05 15:17:37 crc kubenswrapper[4840]: I1205 15:17:37.137334 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-dns-svc\") pod \"dnsmasq-dns-895cf5cf-g8jn9\" (UID: \"7e6f2acf-f874-4abd-8343-0376ca5052df\") " pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" Dec 05 15:17:37 crc kubenswrapper[4840]: I1205 15:17:37.137358 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-config\") pod \"dnsmasq-dns-895cf5cf-g8jn9\" (UID: \"7e6f2acf-f874-4abd-8343-0376ca5052df\") " pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" Dec 05 15:17:37 crc kubenswrapper[4840]: I1205 15:17:37.137384 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-ovsdbserver-sb\") pod \"dnsmasq-dns-895cf5cf-g8jn9\" (UID: \"7e6f2acf-f874-4abd-8343-0376ca5052df\") " pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" Dec 05 15:17:37 crc kubenswrapper[4840]: I1205 15:17:37.137440 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-ovsdbserver-nb\") pod \"dnsmasq-dns-895cf5cf-g8jn9\" (UID: \"7e6f2acf-f874-4abd-8343-0376ca5052df\") " pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" Dec 05 15:17:37 crc kubenswrapper[4840]: I1205 15:17:37.137525 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7cfw\" (UniqueName: \"kubernetes.io/projected/7e6f2acf-f874-4abd-8343-0376ca5052df-kube-api-access-h7cfw\") pod \"dnsmasq-dns-895cf5cf-g8jn9\" (UID: \"7e6f2acf-f874-4abd-8343-0376ca5052df\") " pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" Dec 05 15:17:37 crc kubenswrapper[4840]: I1205 15:17:37.138717 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-ovsdbserver-sb\") pod \"dnsmasq-dns-895cf5cf-g8jn9\" (UID: \"7e6f2acf-f874-4abd-8343-0376ca5052df\") " pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" Dec 05 15:17:37 crc kubenswrapper[4840]: I1205 15:17:37.138833 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-dns-svc\") pod \"dnsmasq-dns-895cf5cf-g8jn9\" (UID: \"7e6f2acf-f874-4abd-8343-0376ca5052df\") " pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" Dec 05 15:17:37 crc kubenswrapper[4840]: I1205 15:17:37.138735 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-dns-swift-storage-0\") pod \"dnsmasq-dns-895cf5cf-g8jn9\" (UID: \"7e6f2acf-f874-4abd-8343-0376ca5052df\") " pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" Dec 05 15:17:37 crc kubenswrapper[4840]: I1205 15:17:37.138840 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-ovsdbserver-nb\") pod \"dnsmasq-dns-895cf5cf-g8jn9\" (UID: \"7e6f2acf-f874-4abd-8343-0376ca5052df\") " pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" Dec 05 15:17:37 crc kubenswrapper[4840]: I1205 15:17:37.139599 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-config\") pod \"dnsmasq-dns-895cf5cf-g8jn9\" (UID: \"7e6f2acf-f874-4abd-8343-0376ca5052df\") " pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" Dec 05 15:17:37 crc kubenswrapper[4840]: I1205 15:17:37.159023 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7cfw\" (UniqueName: \"kubernetes.io/projected/7e6f2acf-f874-4abd-8343-0376ca5052df-kube-api-access-h7cfw\") pod \"dnsmasq-dns-895cf5cf-g8jn9\" (UID: \"7e6f2acf-f874-4abd-8343-0376ca5052df\") " pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" Dec 05 15:17:37 crc kubenswrapper[4840]: I1205 15:17:37.160075 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" Dec 05 15:17:37 crc kubenswrapper[4840]: I1205 15:17:37.431513 4840 generic.go:334] "Generic (PLEG): container finished" podID="914b1f5a-b458-459d-ac76-d06ffd0ef611" containerID="c1f5be28a867337c667fb0b5f8bbb1b69cf7036f3d77cfe48acedc9847b1e37b" exitCode=0 Dec 05 15:17:37 crc kubenswrapper[4840]: I1205 15:17:37.431684 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-k7szk" event={"ID":"914b1f5a-b458-459d-ac76-d06ffd0ef611","Type":"ContainerDied","Data":"c1f5be28a867337c667fb0b5f8bbb1b69cf7036f3d77cfe48acedc9847b1e37b"} Dec 05 15:17:37 crc kubenswrapper[4840]: I1205 15:17:37.665535 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-g8jn9"] Dec 05 15:17:37 crc kubenswrapper[4840]: W1205 15:17:37.671979 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7e6f2acf_f874_4abd_8343_0376ca5052df.slice/crio-d402eee659b9b7cdd2f7e1c30a68d7ca84c65dd643d42c6223e4809d58288fa5 WatchSource:0}: Error finding container d402eee659b9b7cdd2f7e1c30a68d7ca84c65dd643d42c6223e4809d58288fa5: Status 404 returned error can't find the container with id d402eee659b9b7cdd2f7e1c30a68d7ca84c65dd643d42c6223e4809d58288fa5 Dec 05 15:17:38 crc kubenswrapper[4840]: I1205 15:17:38.447293 4840 generic.go:334] "Generic (PLEG): container finished" podID="7e6f2acf-f874-4abd-8343-0376ca5052df" containerID="cac958a7240a0d83b0aad8397e2bdf65e4c0a0b8b43ee8dac34d5b7b3abf780f" exitCode=0 Dec 05 15:17:38 crc kubenswrapper[4840]: I1205 15:17:38.447978 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" event={"ID":"7e6f2acf-f874-4abd-8343-0376ca5052df","Type":"ContainerDied","Data":"cac958a7240a0d83b0aad8397e2bdf65e4c0a0b8b43ee8dac34d5b7b3abf780f"} Dec 05 15:17:38 crc kubenswrapper[4840]: I1205 15:17:38.448021 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" 
event={"ID":"7e6f2acf-f874-4abd-8343-0376ca5052df","Type":"ContainerStarted","Data":"d402eee659b9b7cdd2f7e1c30a68d7ca84c65dd643d42c6223e4809d58288fa5"} Dec 05 15:17:38 crc kubenswrapper[4840]: I1205 15:17:38.934324 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-k7szk" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.071412 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/914b1f5a-b458-459d-ac76-d06ffd0ef611-combined-ca-bundle\") pod \"914b1f5a-b458-459d-ac76-d06ffd0ef611\" (UID: \"914b1f5a-b458-459d-ac76-d06ffd0ef611\") " Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.071924 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75d4q\" (UniqueName: \"kubernetes.io/projected/914b1f5a-b458-459d-ac76-d06ffd0ef611-kube-api-access-75d4q\") pod \"914b1f5a-b458-459d-ac76-d06ffd0ef611\" (UID: \"914b1f5a-b458-459d-ac76-d06ffd0ef611\") " Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.071971 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/914b1f5a-b458-459d-ac76-d06ffd0ef611-config-data\") pod \"914b1f5a-b458-459d-ac76-d06ffd0ef611\" (UID: \"914b1f5a-b458-459d-ac76-d06ffd0ef611\") " Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.077237 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/914b1f5a-b458-459d-ac76-d06ffd0ef611-kube-api-access-75d4q" (OuterVolumeSpecName: "kube-api-access-75d4q") pod "914b1f5a-b458-459d-ac76-d06ffd0ef611" (UID: "914b1f5a-b458-459d-ac76-d06ffd0ef611"). InnerVolumeSpecName "kube-api-access-75d4q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.100117 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/914b1f5a-b458-459d-ac76-d06ffd0ef611-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "914b1f5a-b458-459d-ac76-d06ffd0ef611" (UID: "914b1f5a-b458-459d-ac76-d06ffd0ef611"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.116064 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/914b1f5a-b458-459d-ac76-d06ffd0ef611-config-data" (OuterVolumeSpecName: "config-data") pod "914b1f5a-b458-459d-ac76-d06ffd0ef611" (UID: "914b1f5a-b458-459d-ac76-d06ffd0ef611"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.173507 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/914b1f5a-b458-459d-ac76-d06ffd0ef611-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.173553 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75d4q\" (UniqueName: \"kubernetes.io/projected/914b1f5a-b458-459d-ac76-d06ffd0ef611-kube-api-access-75d4q\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.173574 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/914b1f5a-b458-459d-ac76-d06ffd0ef611-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.458989 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" event={"ID":"7e6f2acf-f874-4abd-8343-0376ca5052df","Type":"ContainerStarted","Data":"cd76f56a6bb03700b8aab84d151ab10631ff315830b62d3df970ac8efa96b324"} Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.459093 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.461843 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-k7szk" event={"ID":"914b1f5a-b458-459d-ac76-d06ffd0ef611","Type":"ContainerDied","Data":"52f47094b534b0af5ab502798bade63e9b273455ca666773a364d9f29f072ab2"} Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.461901 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="52f47094b534b0af5ab502798bade63e9b273455ca666773a364d9f29f072ab2" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.461931 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-k7szk" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.489697 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" podStartSLOduration=3.489675205 podStartE2EDuration="3.489675205s" podCreationTimestamp="2025-12-05 15:17:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:17:39.482947634 +0000 UTC m=+1137.824010258" watchObservedRunningTime="2025-12-05 15:17:39.489675205 +0000 UTC m=+1137.830737819" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.632707 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-g8jn9"] Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.654343 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-l6zqr"] Dec 05 15:17:39 crc kubenswrapper[4840]: E1205 15:17:39.655039 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="914b1f5a-b458-459d-ac76-d06ffd0ef611" containerName="keystone-db-sync" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.655164 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="914b1f5a-b458-459d-ac76-d06ffd0ef611" containerName="keystone-db-sync" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.655486 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="914b1f5a-b458-459d-ac76-d06ffd0ef611" containerName="keystone-db-sync" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.656718 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c9c9f998c-l6zqr" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.672853 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-l6zqr"] Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.679506 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-7vlrn"] Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.694914 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-7vlrn" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.699987 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7vlrn"] Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.704683 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.704738 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.704688 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.704941 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-6bz6r" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.705136 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.787380 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-dns-svc\") pod \"dnsmasq-dns-6c9c9f998c-l6zqr\" (UID: \"82c89454-681f-4632-b172-3dc98a6425dc\") " pod="openstack/dnsmasq-dns-6c9c9f998c-l6zqr" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.787441 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-credential-keys\") pod \"keystone-bootstrap-7vlrn\" (UID: \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\") " pod="openstack/keystone-bootstrap-7vlrn" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.787466 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-config\") pod \"dnsmasq-dns-6c9c9f998c-l6zqr\" (UID: \"82c89454-681f-4632-b172-3dc98a6425dc\") " pod="openstack/dnsmasq-dns-6c9c9f998c-l6zqr" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.787482 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-ovsdbserver-sb\") pod \"dnsmasq-dns-6c9c9f998c-l6zqr\" (UID: \"82c89454-681f-4632-b172-3dc98a6425dc\") " pod="openstack/dnsmasq-dns-6c9c9f998c-l6zqr" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.787524 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-fernet-keys\") pod \"keystone-bootstrap-7vlrn\" (UID: \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\") " pod="openstack/keystone-bootstrap-7vlrn" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.787552 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-ovsdbserver-nb\") pod \"dnsmasq-dns-6c9c9f998c-l6zqr\" (UID: \"82c89454-681f-4632-b172-3dc98a6425dc\") " pod="openstack/dnsmasq-dns-6c9c9f998c-l6zqr" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.787571 4840 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4pvt\" (UniqueName: \"kubernetes.io/projected/82c89454-681f-4632-b172-3dc98a6425dc-kube-api-access-p4pvt\") pod \"dnsmasq-dns-6c9c9f998c-l6zqr\" (UID: \"82c89454-681f-4632-b172-3dc98a6425dc\") " pod="openstack/dnsmasq-dns-6c9c9f998c-l6zqr" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.787596 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-scripts\") pod \"keystone-bootstrap-7vlrn\" (UID: \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\") " pod="openstack/keystone-bootstrap-7vlrn" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.787626 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-dns-swift-storage-0\") pod \"dnsmasq-dns-6c9c9f998c-l6zqr\" (UID: \"82c89454-681f-4632-b172-3dc98a6425dc\") " pod="openstack/dnsmasq-dns-6c9c9f998c-l6zqr" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.787650 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5dsk\" (UniqueName: \"kubernetes.io/projected/6f2ac2cd-d336-4cd8-b381-aa7188c66724-kube-api-access-n5dsk\") pod \"keystone-bootstrap-7vlrn\" (UID: \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\") " pod="openstack/keystone-bootstrap-7vlrn" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.787672 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-combined-ca-bundle\") pod \"keystone-bootstrap-7vlrn\" (UID: \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\") " pod="openstack/keystone-bootstrap-7vlrn" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.787698 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-config-data\") pod \"keystone-bootstrap-7vlrn\" (UID: \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\") " pod="openstack/keystone-bootstrap-7vlrn" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.891077 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5dsk\" (UniqueName: \"kubernetes.io/projected/6f2ac2cd-d336-4cd8-b381-aa7188c66724-kube-api-access-n5dsk\") pod \"keystone-bootstrap-7vlrn\" (UID: \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\") " pod="openstack/keystone-bootstrap-7vlrn" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.891126 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-combined-ca-bundle\") pod \"keystone-bootstrap-7vlrn\" (UID: \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\") " pod="openstack/keystone-bootstrap-7vlrn" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.891155 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-config-data\") pod \"keystone-bootstrap-7vlrn\" (UID: \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\") " pod="openstack/keystone-bootstrap-7vlrn" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.891203 
Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.891238 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-credential-keys\") pod \"keystone-bootstrap-7vlrn\" (UID: \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\") " pod="openstack/keystone-bootstrap-7vlrn"
Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.891256 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-config\") pod \"dnsmasq-dns-6c9c9f998c-l6zqr\" (UID: \"82c89454-681f-4632-b172-3dc98a6425dc\") " pod="openstack/dnsmasq-dns-6c9c9f998c-l6zqr"
Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.891271 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-ovsdbserver-sb\") pod \"dnsmasq-dns-6c9c9f998c-l6zqr\" (UID: \"82c89454-681f-4632-b172-3dc98a6425dc\") " pod="openstack/dnsmasq-dns-6c9c9f998c-l6zqr"
Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.891307 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-fernet-keys\") pod \"keystone-bootstrap-7vlrn\" (UID: \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\") " pod="openstack/keystone-bootstrap-7vlrn"
Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.891333 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-ovsdbserver-nb\") pod \"dnsmasq-dns-6c9c9f998c-l6zqr\" (UID: \"82c89454-681f-4632-b172-3dc98a6425dc\") " pod="openstack/dnsmasq-dns-6c9c9f998c-l6zqr"
Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.891356 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4pvt\" (UniqueName: \"kubernetes.io/projected/82c89454-681f-4632-b172-3dc98a6425dc-kube-api-access-p4pvt\") pod \"dnsmasq-dns-6c9c9f998c-l6zqr\" (UID: \"82c89454-681f-4632-b172-3dc98a6425dc\") " pod="openstack/dnsmasq-dns-6c9c9f998c-l6zqr"
Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.891383 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-scripts\") pod \"keystone-bootstrap-7vlrn\" (UID: \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\") " pod="openstack/keystone-bootstrap-7vlrn"
Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.891405 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-dns-swift-storage-0\") pod \"dnsmasq-dns-6c9c9f998c-l6zqr\" (UID: \"82c89454-681f-4632-b172-3dc98a6425dc\") " pod="openstack/dnsmasq-dns-6c9c9f998c-l6zqr"
Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.892245 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-dns-swift-storage-0\") pod \"dnsmasq-dns-6c9c9f998c-l6zqr\" (UID: \"82c89454-681f-4632-b172-3dc98a6425dc\") " pod="openstack/dnsmasq-dns-6c9c9f998c-l6zqr"
Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.893622 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-config\") pod \"dnsmasq-dns-6c9c9f998c-l6zqr\" (UID: \"82c89454-681f-4632-b172-3dc98a6425dc\") " pod="openstack/dnsmasq-dns-6c9c9f998c-l6zqr"
Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.894313 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-ovsdbserver-sb\") pod \"dnsmasq-dns-6c9c9f998c-l6zqr\" (UID: \"82c89454-681f-4632-b172-3dc98a6425dc\") " pod="openstack/dnsmasq-dns-6c9c9f998c-l6zqr"
Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.895685 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-ovsdbserver-nb\") pod \"dnsmasq-dns-6c9c9f998c-l6zqr\" (UID: \"82c89454-681f-4632-b172-3dc98a6425dc\") " pod="openstack/dnsmasq-dns-6c9c9f998c-l6zqr"
Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.896554 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-dns-svc\") pod \"dnsmasq-dns-6c9c9f998c-l6zqr\" (UID: \"82c89454-681f-4632-b172-3dc98a6425dc\") " pod="openstack/dnsmasq-dns-6c9c9f998c-l6zqr"
Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.896884 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-szwv8"]
Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.899881 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-szwv8"
Need to start a new one" pod="openstack/cinder-db-sync-szwv8" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.903972 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-fernet-keys\") pod \"keystone-bootstrap-7vlrn\" (UID: \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\") " pod="openstack/keystone-bootstrap-7vlrn" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.907591 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-credential-keys\") pod \"keystone-bootstrap-7vlrn\" (UID: \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\") " pod="openstack/keystone-bootstrap-7vlrn" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.908012 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-n52t2" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.908128 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.908044 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.908560 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-config-data\") pod \"keystone-bootstrap-7vlrn\" (UID: \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\") " pod="openstack/keystone-bootstrap-7vlrn" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.912343 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-scripts\") pod \"keystone-bootstrap-7vlrn\" (UID: \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\") " pod="openstack/keystone-bootstrap-7vlrn" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.912881 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-combined-ca-bundle\") pod \"keystone-bootstrap-7vlrn\" (UID: \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\") " pod="openstack/keystone-bootstrap-7vlrn" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.916975 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-szwv8"] Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.935671 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4pvt\" (UniqueName: \"kubernetes.io/projected/82c89454-681f-4632-b172-3dc98a6425dc-kube-api-access-p4pvt\") pod \"dnsmasq-dns-6c9c9f998c-l6zqr\" (UID: \"82c89454-681f-4632-b172-3dc98a6425dc\") " pod="openstack/dnsmasq-dns-6c9c9f998c-l6zqr" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.948310 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7ff68744f9-zt2wd"] Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.949750 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7ff68744f9-zt2wd" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.954115 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.954290 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.954424 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.954613 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-l7lgh" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.967258 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5dsk\" (UniqueName: \"kubernetes.io/projected/6f2ac2cd-d336-4cd8-b381-aa7188c66724-kube-api-access-n5dsk\") pod \"keystone-bootstrap-7vlrn\" (UID: \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\") " pod="openstack/keystone-bootstrap-7vlrn" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.982052 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c9c9f998c-l6zqr" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.983229 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7ff68744f9-zt2wd"] Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.994120 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ae68e2b9-f51d-4486-952d-73c097fbaac4-db-sync-config-data\") pod \"cinder-db-sync-szwv8\" (UID: \"ae68e2b9-f51d-4486-952d-73c097fbaac4\") " pod="openstack/cinder-db-sync-szwv8" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.994167 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae68e2b9-f51d-4486-952d-73c097fbaac4-config-data\") pod \"cinder-db-sync-szwv8\" (UID: \"ae68e2b9-f51d-4486-952d-73c097fbaac4\") " pod="openstack/cinder-db-sync-szwv8" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.994366 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fcd25084-75e8-447b-ba4a-2daa682a9cdb-config-data\") pod \"horizon-7ff68744f9-zt2wd\" (UID: \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\") " pod="openstack/horizon-7ff68744f9-zt2wd" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.994405 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fcd25084-75e8-447b-ba4a-2daa682a9cdb-horizon-secret-key\") pod \"horizon-7ff68744f9-zt2wd\" (UID: \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\") " pod="openstack/horizon-7ff68744f9-zt2wd" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.994538 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae68e2b9-f51d-4486-952d-73c097fbaac4-scripts\") pod \"cinder-db-sync-szwv8\" (UID: \"ae68e2b9-f51d-4486-952d-73c097fbaac4\") " pod="openstack/cinder-db-sync-szwv8" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.994779 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fcd25084-75e8-447b-ba4a-2daa682a9cdb-scripts\") pod \"horizon-7ff68744f9-zt2wd\" (UID: \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\") " pod="openstack/horizon-7ff68744f9-zt2wd" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.994807 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pr2g\" (UniqueName: \"kubernetes.io/projected/fcd25084-75e8-447b-ba4a-2daa682a9cdb-kube-api-access-5pr2g\") pod \"horizon-7ff68744f9-zt2wd\" (UID: \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\") " pod="openstack/horizon-7ff68744f9-zt2wd" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.994849 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xq62m\" (UniqueName: \"kubernetes.io/projected/ae68e2b9-f51d-4486-952d-73c097fbaac4-kube-api-access-xq62m\") pod \"cinder-db-sync-szwv8\" (UID: \"ae68e2b9-f51d-4486-952d-73c097fbaac4\") " pod="openstack/cinder-db-sync-szwv8" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.995143 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae68e2b9-f51d-4486-952d-73c097fbaac4-combined-ca-bundle\") pod \"cinder-db-sync-szwv8\" (UID: \"ae68e2b9-f51d-4486-952d-73c097fbaac4\") " pod="openstack/cinder-db-sync-szwv8" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.995182 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcd25084-75e8-447b-ba4a-2daa682a9cdb-logs\") pod \"horizon-7ff68744f9-zt2wd\" (UID: \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\") " pod="openstack/horizon-7ff68744f9-zt2wd" Dec 05 15:17:39 crc kubenswrapper[4840]: I1205 15:17:39.995204 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ae68e2b9-f51d-4486-952d-73c097fbaac4-etc-machine-id\") pod \"cinder-db-sync-szwv8\" (UID: \"ae68e2b9-f51d-4486-952d-73c097fbaac4\") " pod="openstack/cinder-db-sync-szwv8" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.019502 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7vlrn" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.053930 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.072668 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.082703 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.082967 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.101673 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ae68e2b9-f51d-4486-952d-73c097fbaac4-etc-machine-id\") pod \"cinder-db-sync-szwv8\" (UID: \"ae68e2b9-f51d-4486-952d-73c097fbaac4\") " pod="openstack/cinder-db-sync-szwv8" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.101732 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ae68e2b9-f51d-4486-952d-73c097fbaac4-db-sync-config-data\") pod \"cinder-db-sync-szwv8\" (UID: \"ae68e2b9-f51d-4486-952d-73c097fbaac4\") " pod="openstack/cinder-db-sync-szwv8" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.101751 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae68e2b9-f51d-4486-952d-73c097fbaac4-config-data\") pod \"cinder-db-sync-szwv8\" (UID: \"ae68e2b9-f51d-4486-952d-73c097fbaac4\") " pod="openstack/cinder-db-sync-szwv8" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.101774 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fcd25084-75e8-447b-ba4a-2daa682a9cdb-config-data\") pod \"horizon-7ff68744f9-zt2wd\" (UID: \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\") " pod="openstack/horizon-7ff68744f9-zt2wd" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.101804 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fcd25084-75e8-447b-ba4a-2daa682a9cdb-horizon-secret-key\") pod \"horizon-7ff68744f9-zt2wd\" (UID: \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\") " pod="openstack/horizon-7ff68744f9-zt2wd" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.101826 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae68e2b9-f51d-4486-952d-73c097fbaac4-scripts\") pod \"cinder-db-sync-szwv8\" (UID: \"ae68e2b9-f51d-4486-952d-73c097fbaac4\") " pod="openstack/cinder-db-sync-szwv8" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.101857 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fcd25084-75e8-447b-ba4a-2daa682a9cdb-scripts\") pod \"horizon-7ff68744f9-zt2wd\" (UID: \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\") " pod="openstack/horizon-7ff68744f9-zt2wd" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.101893 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pr2g\" (UniqueName: \"kubernetes.io/projected/fcd25084-75e8-447b-ba4a-2daa682a9cdb-kube-api-access-5pr2g\") pod \"horizon-7ff68744f9-zt2wd\" (UID: \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\") " pod="openstack/horizon-7ff68744f9-zt2wd" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.101932 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xq62m\" 
(UniqueName: \"kubernetes.io/projected/ae68e2b9-f51d-4486-952d-73c097fbaac4-kube-api-access-xq62m\") pod \"cinder-db-sync-szwv8\" (UID: \"ae68e2b9-f51d-4486-952d-73c097fbaac4\") " pod="openstack/cinder-db-sync-szwv8" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.101957 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae68e2b9-f51d-4486-952d-73c097fbaac4-combined-ca-bundle\") pod \"cinder-db-sync-szwv8\" (UID: \"ae68e2b9-f51d-4486-952d-73c097fbaac4\") " pod="openstack/cinder-db-sync-szwv8" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.101986 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcd25084-75e8-447b-ba4a-2daa682a9cdb-logs\") pod \"horizon-7ff68744f9-zt2wd\" (UID: \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\") " pod="openstack/horizon-7ff68744f9-zt2wd" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.102520 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcd25084-75e8-447b-ba4a-2daa682a9cdb-logs\") pod \"horizon-7ff68744f9-zt2wd\" (UID: \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\") " pod="openstack/horizon-7ff68744f9-zt2wd" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.102565 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ae68e2b9-f51d-4486-952d-73c097fbaac4-etc-machine-id\") pod \"cinder-db-sync-szwv8\" (UID: \"ae68e2b9-f51d-4486-952d-73c097fbaac4\") " pod="openstack/cinder-db-sync-szwv8" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.105557 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fcd25084-75e8-447b-ba4a-2daa682a9cdb-scripts\") pod \"horizon-7ff68744f9-zt2wd\" (UID: \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\") " pod="openstack/horizon-7ff68744f9-zt2wd" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.107881 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae68e2b9-f51d-4486-952d-73c097fbaac4-scripts\") pod \"cinder-db-sync-szwv8\" (UID: \"ae68e2b9-f51d-4486-952d-73c097fbaac4\") " pod="openstack/cinder-db-sync-szwv8" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.107961 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fcd25084-75e8-447b-ba4a-2daa682a9cdb-config-data\") pod \"horizon-7ff68744f9-zt2wd\" (UID: \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\") " pod="openstack/horizon-7ff68744f9-zt2wd" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.108750 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ae68e2b9-f51d-4486-952d-73c097fbaac4-db-sync-config-data\") pod \"cinder-db-sync-szwv8\" (UID: \"ae68e2b9-f51d-4486-952d-73c097fbaac4\") " pod="openstack/cinder-db-sync-szwv8" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.108979 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae68e2b9-f51d-4486-952d-73c097fbaac4-combined-ca-bundle\") pod \"cinder-db-sync-szwv8\" (UID: \"ae68e2b9-f51d-4486-952d-73c097fbaac4\") " pod="openstack/cinder-db-sync-szwv8" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.128748 
4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fcd25084-75e8-447b-ba4a-2daa682a9cdb-horizon-secret-key\") pod \"horizon-7ff68744f9-zt2wd\" (UID: \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\") " pod="openstack/horizon-7ff68744f9-zt2wd" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.129031 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae68e2b9-f51d-4486-952d-73c097fbaac4-config-data\") pod \"cinder-db-sync-szwv8\" (UID: \"ae68e2b9-f51d-4486-952d-73c097fbaac4\") " pod="openstack/cinder-db-sync-szwv8" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.140848 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pr2g\" (UniqueName: \"kubernetes.io/projected/fcd25084-75e8-447b-ba4a-2daa682a9cdb-kube-api-access-5pr2g\") pod \"horizon-7ff68744f9-zt2wd\" (UID: \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\") " pod="openstack/horizon-7ff68744f9-zt2wd" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.162362 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xq62m\" (UniqueName: \"kubernetes.io/projected/ae68e2b9-f51d-4486-952d-73c097fbaac4-kube-api-access-xq62m\") pod \"cinder-db-sync-szwv8\" (UID: \"ae68e2b9-f51d-4486-952d-73c097fbaac4\") " pod="openstack/cinder-db-sync-szwv8" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.178546 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.178589 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-4n4lf"] Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.179530 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-4n4lf"] Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.179547 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-5ccg9"] Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.180242 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-5ccg9" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.180567 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-4n4lf" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.184722 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.184924 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.192855 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-nz6z9" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.193197 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-2fq2v" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.197152 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.204506 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a351148-31b1-402a-a8f5-0f26f81fddef-log-httpd\") pod \"ceilometer-0\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " pod="openstack/ceilometer-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.204630 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a351148-31b1-402a-a8f5-0f26f81fddef-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " pod="openstack/ceilometer-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.204702 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c82pl\" (UniqueName: \"kubernetes.io/projected/7a351148-31b1-402a-a8f5-0f26f81fddef-kube-api-access-c82pl\") pod \"ceilometer-0\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " pod="openstack/ceilometer-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.204727 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a351148-31b1-402a-a8f5-0f26f81fddef-run-httpd\") pod \"ceilometer-0\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " pod="openstack/ceilometer-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.204741 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a351148-31b1-402a-a8f5-0f26f81fddef-config-data\") pod \"ceilometer-0\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " pod="openstack/ceilometer-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.204756 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7a351148-31b1-402a-a8f5-0f26f81fddef-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " pod="openstack/ceilometer-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.204771 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a351148-31b1-402a-a8f5-0f26f81fddef-scripts\") pod \"ceilometer-0\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " pod="openstack/ceilometer-0" Dec 05 15:17:40 crc kubenswrapper[4840]: 
I1205 15:17:40.218092 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-5ccg9"] Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.246907 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-l6zqr"] Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.251910 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-755cc8bd6f-hrt72"] Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.259933 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-755cc8bd6f-hrt72" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.392276 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c82pl\" (UniqueName: \"kubernetes.io/projected/7a351148-31b1-402a-a8f5-0f26f81fddef-kube-api-access-c82pl\") pod \"ceilometer-0\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " pod="openstack/ceilometer-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.392353 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a351148-31b1-402a-a8f5-0f26f81fddef-run-httpd\") pod \"ceilometer-0\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " pod="openstack/ceilometer-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.392378 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a351148-31b1-402a-a8f5-0f26f81fddef-config-data\") pod \"ceilometer-0\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " pod="openstack/ceilometer-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.392398 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7a351148-31b1-402a-a8f5-0f26f81fddef-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " pod="openstack/ceilometer-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.392489 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a351148-31b1-402a-a8f5-0f26f81fddef-scripts\") pod \"ceilometer-0\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " pod="openstack/ceilometer-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.392534 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a351148-31b1-402a-a8f5-0f26f81fddef-log-httpd\") pod \"ceilometer-0\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " pod="openstack/ceilometer-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.392605 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a351148-31b1-402a-a8f5-0f26f81fddef-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " pod="openstack/ceilometer-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.396971 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-szwv8" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.402028 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a351148-31b1-402a-a8f5-0f26f81fddef-config-data\") pod \"ceilometer-0\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " pod="openstack/ceilometer-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.402348 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a351148-31b1-402a-a8f5-0f26f81fddef-log-httpd\") pod \"ceilometer-0\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " pod="openstack/ceilometer-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.429659 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a351148-31b1-402a-a8f5-0f26f81fddef-run-httpd\") pod \"ceilometer-0\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " pod="openstack/ceilometer-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.435241 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-8jg2f"] Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.463253 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.479383 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a351148-31b1-402a-a8f5-0f26f81fddef-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " pod="openstack/ceilometer-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.496099 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a351148-31b1-402a-a8f5-0f26f81fddef-scripts\") pod \"ceilometer-0\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " pod="openstack/ceilometer-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.523887 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7a351148-31b1-402a-a8f5-0f26f81fddef-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " pod="openstack/ceilometer-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.526755 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c82pl\" (UniqueName: \"kubernetes.io/projected/7a351148-31b1-402a-a8f5-0f26f81fddef-kube-api-access-c82pl\") pod \"ceilometer-0\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " pod="openstack/ceilometer-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.540547 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsd76\" (UniqueName: \"kubernetes.io/projected/509c87f8-fee0-4a27-ad42-91629218a636-kube-api-access-nsd76\") pod \"neutron-db-sync-4n4lf\" (UID: \"509c87f8-fee0-4a27-ad42-91629218a636\") " pod="openstack/neutron-db-sync-4n4lf" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.540902 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5731646f-d8c6-4bfd-b815-3d68c244d801-db-sync-config-data\") pod \"barbican-db-sync-5ccg9\" (UID: 
\"5731646f-d8c6-4bfd-b815-3d68c244d801\") " pod="openstack/barbican-db-sync-5ccg9" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.540985 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/509c87f8-fee0-4a27-ad42-91629218a636-combined-ca-bundle\") pod \"neutron-db-sync-4n4lf\" (UID: \"509c87f8-fee0-4a27-ad42-91629218a636\") " pod="openstack/neutron-db-sync-4n4lf" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.541017 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qlw2t\" (UniqueName: \"kubernetes.io/projected/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-kube-api-access-qlw2t\") pod \"horizon-755cc8bd6f-hrt72\" (UID: \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\") " pod="openstack/horizon-755cc8bd6f-hrt72" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.541075 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-config-data\") pod \"horizon-755cc8bd6f-hrt72\" (UID: \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\") " pod="openstack/horizon-755cc8bd6f-hrt72" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.541096 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-logs\") pod \"horizon-755cc8bd6f-hrt72\" (UID: \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\") " pod="openstack/horizon-755cc8bd6f-hrt72" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.541159 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5731646f-d8c6-4bfd-b815-3d68c244d801-combined-ca-bundle\") pod \"barbican-db-sync-5ccg9\" (UID: \"5731646f-d8c6-4bfd-b815-3d68c244d801\") " pod="openstack/barbican-db-sync-5ccg9" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.541217 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-scripts\") pod \"horizon-755cc8bd6f-hrt72\" (UID: \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\") " pod="openstack/horizon-755cc8bd6f-hrt72" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.541250 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-horizon-secret-key\") pod \"horizon-755cc8bd6f-hrt72\" (UID: \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\") " pod="openstack/horizon-755cc8bd6f-hrt72" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.541295 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/509c87f8-fee0-4a27-ad42-91629218a636-config\") pod \"neutron-db-sync-4n4lf\" (UID: \"509c87f8-fee0-4a27-ad42-91629218a636\") " pod="openstack/neutron-db-sync-4n4lf" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.541347 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f878p\" (UniqueName: \"kubernetes.io/projected/5731646f-d8c6-4bfd-b815-3d68c244d801-kube-api-access-f878p\") pod 
\"barbican-db-sync-5ccg9\" (UID: \"5731646f-d8c6-4bfd-b815-3d68c244d801\") " pod="openstack/barbican-db-sync-5ccg9" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.555268 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7ff68744f9-zt2wd" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.602285 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-755cc8bd6f-hrt72"] Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.635713 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.645656 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bwgj\" (UniqueName: \"kubernetes.io/projected/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-kube-api-access-8bwgj\") pod \"dnsmasq-dns-57c957c4ff-8jg2f\" (UID: \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\") " pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.645712 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-config-data\") pod \"horizon-755cc8bd6f-hrt72\" (UID: \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\") " pod="openstack/horizon-755cc8bd6f-hrt72" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.645737 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-logs\") pod \"horizon-755cc8bd6f-hrt72\" (UID: \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\") " pod="openstack/horizon-755cc8bd6f-hrt72" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.645772 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5731646f-d8c6-4bfd-b815-3d68c244d801-combined-ca-bundle\") pod \"barbican-db-sync-5ccg9\" (UID: \"5731646f-d8c6-4bfd-b815-3d68c244d801\") " pod="openstack/barbican-db-sync-5ccg9" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.645812 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-scripts\") pod \"horizon-755cc8bd6f-hrt72\" (UID: \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\") " pod="openstack/horizon-755cc8bd6f-hrt72" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.645887 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-config\") pod \"dnsmasq-dns-57c957c4ff-8jg2f\" (UID: \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\") " pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.645932 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-horizon-secret-key\") pod \"horizon-755cc8bd6f-hrt72\" (UID: \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\") " pod="openstack/horizon-755cc8bd6f-hrt72" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.645955 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/509c87f8-fee0-4a27-ad42-91629218a636-config\") pod 
\"neutron-db-sync-4n4lf\" (UID: \"509c87f8-fee0-4a27-ad42-91629218a636\") " pod="openstack/neutron-db-sync-4n4lf" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.645993 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-8jg2f\" (UID: \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\") " pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.647132 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-logs\") pod \"horizon-755cc8bd6f-hrt72\" (UID: \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\") " pod="openstack/horizon-755cc8bd6f-hrt72" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.647440 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f878p\" (UniqueName: \"kubernetes.io/projected/5731646f-d8c6-4bfd-b815-3d68c244d801-kube-api-access-f878p\") pod \"barbican-db-sync-5ccg9\" (UID: \"5731646f-d8c6-4bfd-b815-3d68c244d801\") " pod="openstack/barbican-db-sync-5ccg9" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.647484 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-8jg2f\" (UID: \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\") " pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.647557 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsd76\" (UniqueName: \"kubernetes.io/projected/509c87f8-fee0-4a27-ad42-91629218a636-kube-api-access-nsd76\") pod \"neutron-db-sync-4n4lf\" (UID: \"509c87f8-fee0-4a27-ad42-91629218a636\") " pod="openstack/neutron-db-sync-4n4lf" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.647719 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-8jg2f\" (UID: \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\") " pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.647758 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5731646f-d8c6-4bfd-b815-3d68c244d801-db-sync-config-data\") pod \"barbican-db-sync-5ccg9\" (UID: \"5731646f-d8c6-4bfd-b815-3d68c244d801\") " pod="openstack/barbican-db-sync-5ccg9" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.647829 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/509c87f8-fee0-4a27-ad42-91629218a636-combined-ca-bundle\") pod \"neutron-db-sync-4n4lf\" (UID: \"509c87f8-fee0-4a27-ad42-91629218a636\") " pod="openstack/neutron-db-sync-4n4lf" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.647889 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-8jg2f\" (UID: 
\"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\") " pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.647919 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qlw2t\" (UniqueName: \"kubernetes.io/projected/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-kube-api-access-qlw2t\") pod \"horizon-755cc8bd6f-hrt72\" (UID: \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\") " pod="openstack/horizon-755cc8bd6f-hrt72" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.648565 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-config-data\") pod \"horizon-755cc8bd6f-hrt72\" (UID: \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\") " pod="openstack/horizon-755cc8bd6f-hrt72" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.649332 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-scripts\") pod \"horizon-755cc8bd6f-hrt72\" (UID: \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\") " pod="openstack/horizon-755cc8bd6f-hrt72" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.658220 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-horizon-secret-key\") pod \"horizon-755cc8bd6f-hrt72\" (UID: \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\") " pod="openstack/horizon-755cc8bd6f-hrt72" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.658811 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5731646f-d8c6-4bfd-b815-3d68c244d801-db-sync-config-data\") pod \"barbican-db-sync-5ccg9\" (UID: \"5731646f-d8c6-4bfd-b815-3d68c244d801\") " pod="openstack/barbican-db-sync-5ccg9" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.660954 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/509c87f8-fee0-4a27-ad42-91629218a636-combined-ca-bundle\") pod \"neutron-db-sync-4n4lf\" (UID: \"509c87f8-fee0-4a27-ad42-91629218a636\") " pod="openstack/neutron-db-sync-4n4lf" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.664775 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/509c87f8-fee0-4a27-ad42-91629218a636-config\") pod \"neutron-db-sync-4n4lf\" (UID: \"509c87f8-fee0-4a27-ad42-91629218a636\") " pod="openstack/neutron-db-sync-4n4lf" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.672494 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5731646f-d8c6-4bfd-b815-3d68c244d801-combined-ca-bundle\") pod \"barbican-db-sync-5ccg9\" (UID: \"5731646f-d8c6-4bfd-b815-3d68c244d801\") " pod="openstack/barbican-db-sync-5ccg9" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.676370 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsd76\" (UniqueName: \"kubernetes.io/projected/509c87f8-fee0-4a27-ad42-91629218a636-kube-api-access-nsd76\") pod \"neutron-db-sync-4n4lf\" (UID: \"509c87f8-fee0-4a27-ad42-91629218a636\") " pod="openstack/neutron-db-sync-4n4lf" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.677779 4840 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-qlw2t\" (UniqueName: \"kubernetes.io/projected/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-kube-api-access-qlw2t\") pod \"horizon-755cc8bd6f-hrt72\" (UID: \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\") " pod="openstack/horizon-755cc8bd6f-hrt72" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.678792 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4n4lf" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.681839 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f878p\" (UniqueName: \"kubernetes.io/projected/5731646f-d8c6-4bfd-b815-3d68c244d801-kube-api-access-f878p\") pod \"barbican-db-sync-5ccg9\" (UID: \"5731646f-d8c6-4bfd-b815-3d68c244d801\") " pod="openstack/barbican-db-sync-5ccg9" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.691323 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-755cc8bd6f-hrt72" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.755579 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-8jg2f\" (UID: \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\") " pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.755712 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-8jg2f\" (UID: \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\") " pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.755790 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bwgj\" (UniqueName: \"kubernetes.io/projected/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-kube-api-access-8bwgj\") pod \"dnsmasq-dns-57c957c4ff-8jg2f\" (UID: \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\") " pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.755925 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-config\") pod \"dnsmasq-dns-57c957c4ff-8jg2f\" (UID: \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\") " pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.755991 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-8jg2f\" (UID: \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\") " pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.756048 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-8jg2f\" (UID: \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\") " pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.757612 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-8jg2f\" (UID: \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\") " pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.759636 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-8jg2f\" (UID: \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\") " pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.762328 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-config\") pod \"dnsmasq-dns-57c957c4ff-8jg2f\" (UID: \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\") " pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.762941 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-8jg2f"] Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.763531 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-8jg2f\" (UID: \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\") " pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.768372 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-8jg2f\" (UID: \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\") " pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.782839 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.785013 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.793487 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.793714 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.793860 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-j44ls" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.797429 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-dfgrm"] Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.798632 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-dfgrm" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.807010 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-mkgbz" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.807142 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.811104 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.812049 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bwgj\" (UniqueName: \"kubernetes.io/projected/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-kube-api-access-8bwgj\") pod \"dnsmasq-dns-57c957c4ff-8jg2f\" (UID: \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\") " pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.834058 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.846267 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-dfgrm"] Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.858392 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dab20e36-d38f-4c5f-9d42-028c9df5ca51-combined-ca-bundle\") pod \"placement-db-sync-dfgrm\" (UID: \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\") " pod="openstack/placement-db-sync-dfgrm" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.858473 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dab20e36-d38f-4c5f-9d42-028c9df5ca51-config-data\") pod \"placement-db-sync-dfgrm\" (UID: \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\") " pod="openstack/placement-db-sync-dfgrm" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.858502 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5fwz\" (UniqueName: \"kubernetes.io/projected/15e31586-1918-4aca-b3cc-eb2e2e6696d5-kube-api-access-k5fwz\") pod \"glance-default-external-api-0\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " pod="openstack/glance-default-external-api-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.858527 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dab20e36-d38f-4c5f-9d42-028c9df5ca51-logs\") pod \"placement-db-sync-dfgrm\" (UID: \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\") " pod="openstack/placement-db-sync-dfgrm" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.858563 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15e31586-1918-4aca-b3cc-eb2e2e6696d5-config-data\") pod \"glance-default-external-api-0\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " pod="openstack/glance-default-external-api-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.858600 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15e31586-1918-4aca-b3cc-eb2e2e6696d5-scripts\") pod 
\"glance-default-external-api-0\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " pod="openstack/glance-default-external-api-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.858706 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/15e31586-1918-4aca-b3cc-eb2e2e6696d5-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " pod="openstack/glance-default-external-api-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.858770 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15e31586-1918-4aca-b3cc-eb2e2e6696d5-logs\") pod \"glance-default-external-api-0\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " pod="openstack/glance-default-external-api-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.858796 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " pod="openstack/glance-default-external-api-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.858819 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6p8p\" (UniqueName: \"kubernetes.io/projected/dab20e36-d38f-4c5f-9d42-028c9df5ca51-kube-api-access-m6p8p\") pod \"placement-db-sync-dfgrm\" (UID: \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\") " pod="openstack/placement-db-sync-dfgrm" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.858841 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15e31586-1918-4aca-b3cc-eb2e2e6696d5-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " pod="openstack/glance-default-external-api-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.858900 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dab20e36-d38f-4c5f-9d42-028c9df5ca51-scripts\") pod \"placement-db-sync-dfgrm\" (UID: \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\") " pod="openstack/placement-db-sync-dfgrm" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.867366 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.869211 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.874161 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.897416 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 15:17:40 crc kubenswrapper[4840]: I1205 15:17:40.919175 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.008342 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15e31586-1918-4aca-b3cc-eb2e2e6696d5-scripts\") pod \"glance-default-external-api-0\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " pod="openstack/glance-default-external-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.008399 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.008428 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-logs\") pod \"glance-default-internal-api-0\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.008465 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/15e31586-1918-4aca-b3cc-eb2e2e6696d5-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " pod="openstack/glance-default-external-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.008486 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15e31586-1918-4aca-b3cc-eb2e2e6696d5-logs\") pod \"glance-default-external-api-0\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " pod="openstack/glance-default-external-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.008514 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " pod="openstack/glance-default-external-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.008523 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-5ccg9" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.008534 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6p8p\" (UniqueName: \"kubernetes.io/projected/dab20e36-d38f-4c5f-9d42-028c9df5ca51-kube-api-access-m6p8p\") pod \"placement-db-sync-dfgrm\" (UID: \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\") " pod="openstack/placement-db-sync-dfgrm" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.009004 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15e31586-1918-4aca-b3cc-eb2e2e6696d5-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " pod="openstack/glance-default-external-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.009048 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dab20e36-d38f-4c5f-9d42-028c9df5ca51-scripts\") pod \"placement-db-sync-dfgrm\" (UID: \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\") " pod="openstack/placement-db-sync-dfgrm" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.009102 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.009126 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.009160 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.009172 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/15e31586-1918-4aca-b3cc-eb2e2e6696d5-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " pod="openstack/glance-default-external-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.009187 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dab20e36-d38f-4c5f-9d42-028c9df5ca51-combined-ca-bundle\") pod \"placement-db-sync-dfgrm\" (UID: \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\") " pod="openstack/placement-db-sync-dfgrm" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.009265 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9bv9\" (UniqueName: \"kubernetes.io/projected/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-kube-api-access-q9bv9\") pod \"glance-default-internal-api-0\" (UID: 
\"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.009290 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dab20e36-d38f-4c5f-9d42-028c9df5ca51-config-data\") pod \"placement-db-sync-dfgrm\" (UID: \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\") " pod="openstack/placement-db-sync-dfgrm" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.009312 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5fwz\" (UniqueName: \"kubernetes.io/projected/15e31586-1918-4aca-b3cc-eb2e2e6696d5-kube-api-access-k5fwz\") pod \"glance-default-external-api-0\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " pod="openstack/glance-default-external-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.009333 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dab20e36-d38f-4c5f-9d42-028c9df5ca51-logs\") pod \"placement-db-sync-dfgrm\" (UID: \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\") " pod="openstack/placement-db-sync-dfgrm" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.009363 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.009400 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15e31586-1918-4aca-b3cc-eb2e2e6696d5-logs\") pod \"glance-default-external-api-0\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " pod="openstack/glance-default-external-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.009403 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15e31586-1918-4aca-b3cc-eb2e2e6696d5-config-data\") pod \"glance-default-external-api-0\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " pod="openstack/glance-default-external-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.009687 4840 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-external-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.017626 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15e31586-1918-4aca-b3cc-eb2e2e6696d5-scripts\") pod \"glance-default-external-api-0\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " pod="openstack/glance-default-external-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.019369 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dab20e36-d38f-4c5f-9d42-028c9df5ca51-logs\") pod \"placement-db-sync-dfgrm\" (UID: \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\") " pod="openstack/placement-db-sync-dfgrm" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 
15:17:41.025002 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15e31586-1918-4aca-b3cc-eb2e2e6696d5-config-data\") pod \"glance-default-external-api-0\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " pod="openstack/glance-default-external-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.027041 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dab20e36-d38f-4c5f-9d42-028c9df5ca51-combined-ca-bundle\") pod \"placement-db-sync-dfgrm\" (UID: \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\") " pod="openstack/placement-db-sync-dfgrm" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.027035 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15e31586-1918-4aca-b3cc-eb2e2e6696d5-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " pod="openstack/glance-default-external-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.035841 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dab20e36-d38f-4c5f-9d42-028c9df5ca51-config-data\") pod \"placement-db-sync-dfgrm\" (UID: \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\") " pod="openstack/placement-db-sync-dfgrm" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.041111 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dab20e36-d38f-4c5f-9d42-028c9df5ca51-scripts\") pod \"placement-db-sync-dfgrm\" (UID: \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\") " pod="openstack/placement-db-sync-dfgrm" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.047274 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6p8p\" (UniqueName: \"kubernetes.io/projected/dab20e36-d38f-4c5f-9d42-028c9df5ca51-kube-api-access-m6p8p\") pod \"placement-db-sync-dfgrm\" (UID: \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\") " pod="openstack/placement-db-sync-dfgrm" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.049757 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5fwz\" (UniqueName: \"kubernetes.io/projected/15e31586-1918-4aca-b3cc-eb2e2e6696d5-kube-api-access-k5fwz\") pod \"glance-default-external-api-0\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " pod="openstack/glance-default-external-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.067176 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-l6zqr"] Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.068936 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " pod="openstack/glance-default-external-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.110848 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.110923 4840 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-logs\") pod \"glance-default-internal-api-0\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.110997 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.111018 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.111040 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.111080 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9bv9\" (UniqueName: \"kubernetes.io/projected/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-kube-api-access-q9bv9\") pod \"glance-default-internal-api-0\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.111104 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.111969 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.112902 4840 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-internal-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.113480 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-logs\") pod \"glance-default-internal-api-0\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.118068 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.126756 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.135316 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9bv9\" (UniqueName: \"kubernetes.io/projected/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-kube-api-access-q9bv9\") pod \"glance-default-internal-api-0\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.146792 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.148119 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.156589 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.166558 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-dfgrm" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.225532 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.241676 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7vlrn"] Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.250561 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-szwv8"] Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.562276 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7ff68744f9-zt2wd"] Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.611445 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7vlrn" event={"ID":"6f2ac2cd-d336-4cd8-b381-aa7188c66724","Type":"ContainerStarted","Data":"2f1c094ad02fcdaf9a1b731af542be4e35fdd6eb3cf2955db5e4e5812dc95501"} Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.623088 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c9c9f998c-l6zqr" event={"ID":"82c89454-681f-4632-b172-3dc98a6425dc","Type":"ContainerStarted","Data":"5583e1fae8e84781d5a8aac637eb03f3869daee0c662769865a80258adb6e787"} Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.626534 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" podUID="7e6f2acf-f874-4abd-8343-0376ca5052df" containerName="dnsmasq-dns" containerID="cri-o://cd76f56a6bb03700b8aab84d151ab10631ff315830b62d3df970ac8efa96b324" gracePeriod=10 Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.626563 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-szwv8" event={"ID":"ae68e2b9-f51d-4486-952d-73c097fbaac4","Type":"ContainerStarted","Data":"b0df462856c5e1122ba18273113d0b7e4c0338a486a192a363945f18f2c26289"} Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.638829 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:17:41 crc kubenswrapper[4840]: W1205 15:17:41.672964 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a351148_31b1_402a_a8f5_0f26f81fddef.slice/crio-7e5759ba821fbc43654734a2d29a8808ba206f616abf3e4e535d13d4ee4578bb WatchSource:0}: Error finding container 7e5759ba821fbc43654734a2d29a8808ba206f616abf3e4e535d13d4ee4578bb: Status 404 returned error can't find the container with id 7e5759ba821fbc43654734a2d29a8808ba206f616abf3e4e535d13d4ee4578bb Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.851095 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-755cc8bd6f-hrt72"] Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.863687 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-4n4lf"] Dec 05 15:17:41 crc kubenswrapper[4840]: I1205 15:17:41.987187 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-5ccg9"] Dec 05 15:17:42 crc kubenswrapper[4840]: I1205 15:17:42.003672 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-8jg2f"] Dec 05 15:17:42 crc kubenswrapper[4840]: W1205 15:17:42.010314 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7c611c21_c0e0_4af2_a5a7_195807a6fcfa.slice/crio-8fc0d29d6d6f916c49bb0558ad61c2b4bb1bcf8acc95d0110ddb8a51837e7869 WatchSource:0}: Error finding container 
8fc0d29d6d6f916c49bb0558ad61c2b4bb1bcf8acc95d0110ddb8a51837e7869: Status 404 returned error can't find the container with id 8fc0d29d6d6f916c49bb0558ad61c2b4bb1bcf8acc95d0110ddb8a51837e7869 Dec 05 15:17:42 crc kubenswrapper[4840]: I1205 15:17:42.217707 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-dfgrm"] Dec 05 15:17:42 crc kubenswrapper[4840]: I1205 15:17:42.360073 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 15:17:43 crc kubenswrapper[4840]: W1205 15:17:42.393075 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc6480440_9c3d_4541_b5f1_baaeb1e4d6f6.slice/crio-4cc11f691f77d7deab7b576a674cd1c617fb8afff29f93977369b021065a51b4 WatchSource:0}: Error finding container 4cc11f691f77d7deab7b576a674cd1c617fb8afff29f93977369b021065a51b4: Status 404 returned error can't find the container with id 4cc11f691f77d7deab7b576a674cd1c617fb8afff29f93977369b021065a51b4 Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.476153 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" Dec 05 15:17:43 crc kubenswrapper[4840]: W1205 15:17:42.476309 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod15e31586_1918_4aca_b3cc_eb2e2e6696d5.slice/crio-bea34ae1bd9cdce9f9b5d3488c14aa9264ed00ffc67f21bb6a3889e328d1fd52 WatchSource:0}: Error finding container bea34ae1bd9cdce9f9b5d3488c14aa9264ed00ffc67f21bb6a3889e328d1fd52: Status 404 returned error can't find the container with id bea34ae1bd9cdce9f9b5d3488c14aa9264ed00ffc67f21bb6a3889e328d1fd52 Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.526460 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.574651 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-config\") pod \"7e6f2acf-f874-4abd-8343-0376ca5052df\" (UID: \"7e6f2acf-f874-4abd-8343-0376ca5052df\") " Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.575350 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-dns-swift-storage-0\") pod \"7e6f2acf-f874-4abd-8343-0376ca5052df\" (UID: \"7e6f2acf-f874-4abd-8343-0376ca5052df\") " Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.577083 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-dns-svc\") pod \"7e6f2acf-f874-4abd-8343-0376ca5052df\" (UID: \"7e6f2acf-f874-4abd-8343-0376ca5052df\") " Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.577116 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-ovsdbserver-nb\") pod \"7e6f2acf-f874-4abd-8343-0376ca5052df\" (UID: \"7e6f2acf-f874-4abd-8343-0376ca5052df\") " Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.577180 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7cfw\" (UniqueName: 
\"kubernetes.io/projected/7e6f2acf-f874-4abd-8343-0376ca5052df-kube-api-access-h7cfw\") pod \"7e6f2acf-f874-4abd-8343-0376ca5052df\" (UID: \"7e6f2acf-f874-4abd-8343-0376ca5052df\") " Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.577204 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-ovsdbserver-sb\") pod \"7e6f2acf-f874-4abd-8343-0376ca5052df\" (UID: \"7e6f2acf-f874-4abd-8343-0376ca5052df\") " Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.602673 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e6f2acf-f874-4abd-8343-0376ca5052df-kube-api-access-h7cfw" (OuterVolumeSpecName: "kube-api-access-h7cfw") pod "7e6f2acf-f874-4abd-8343-0376ca5052df" (UID: "7e6f2acf-f874-4abd-8343-0376ca5052df"). InnerVolumeSpecName "kube-api-access-h7cfw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.680000 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7cfw\" (UniqueName: \"kubernetes.io/projected/7e6f2acf-f874-4abd-8343-0376ca5052df-kube-api-access-h7cfw\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.682507 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7e6f2acf-f874-4abd-8343-0376ca5052df" (UID: "7e6f2acf-f874-4abd-8343-0376ca5052df"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.682694 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7e6f2acf-f874-4abd-8343-0376ca5052df" (UID: "7e6f2acf-f874-4abd-8343-0376ca5052df"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.738276 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-config" (OuterVolumeSpecName: "config") pod "7e6f2acf-f874-4abd-8343-0376ca5052df" (UID: "7e6f2acf-f874-4abd-8343-0376ca5052df"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.739654 4840 generic.go:334] "Generic (PLEG): container finished" podID="7c611c21-c0e0-4af2-a5a7-195807a6fcfa" containerID="2efc93813c355b880154dfa4921ded0b1a54c5748e138457ab48162127ee44c2" exitCode=0 Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.739708 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" event={"ID":"7c611c21-c0e0-4af2-a5a7-195807a6fcfa","Type":"ContainerDied","Data":"2efc93813c355b880154dfa4921ded0b1a54c5748e138457ab48162127ee44c2"} Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.739743 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" event={"ID":"7c611c21-c0e0-4af2-a5a7-195807a6fcfa","Type":"ContainerStarted","Data":"8fc0d29d6d6f916c49bb0558ad61c2b4bb1bcf8acc95d0110ddb8a51837e7869"} Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.754281 4840 generic.go:334] "Generic (PLEG): container finished" podID="82c89454-681f-4632-b172-3dc98a6425dc" containerID="7dd43fbf22f46bd57f1e1b8d9bdbcd074e9967ccd5b97b43b7c241fb8acdd095" exitCode=0 Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.754337 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c9c9f998c-l6zqr" event={"ID":"82c89454-681f-4632-b172-3dc98a6425dc","Type":"ContainerDied","Data":"7dd43fbf22f46bd57f1e1b8d9bdbcd074e9967ccd5b97b43b7c241fb8acdd095"} Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.761594 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-755cc8bd6f-hrt72" event={"ID":"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe","Type":"ContainerStarted","Data":"7feb31533728811b38c81fe181c214d79f5e6506e66bb635b7e2f671b4968f29"} Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.773613 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7ff68744f9-zt2wd" event={"ID":"fcd25084-75e8-447b-ba4a-2daa682a9cdb","Type":"ContainerStarted","Data":"8e9a99a48f86a8c1d18bea7341a9f8f479ac8f609b66948a75dac9b3d48a2b60"} Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.778946 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7e6f2acf-f874-4abd-8343-0376ca5052df" (UID: "7e6f2acf-f874-4abd-8343-0376ca5052df"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.780049 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7e6f2acf-f874-4abd-8343-0376ca5052df" (UID: "7e6f2acf-f874-4abd-8343-0376ca5052df"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.781787 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-config\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.781813 4840 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.781824 4840 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.781832 4840 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.781841 4840 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7e6f2acf-f874-4abd-8343-0376ca5052df-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.807737 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a351148-31b1-402a-a8f5-0f26f81fddef","Type":"ContainerStarted","Data":"7e5759ba821fbc43654734a2d29a8808ba206f616abf3e4e535d13d4ee4578bb"} Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.817660 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"15e31586-1918-4aca-b3cc-eb2e2e6696d5","Type":"ContainerStarted","Data":"bea34ae1bd9cdce9f9b5d3488c14aa9264ed00ffc67f21bb6a3889e328d1fd52"} Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.824638 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6","Type":"ContainerStarted","Data":"4cc11f691f77d7deab7b576a674cd1c617fb8afff29f93977369b021065a51b4"} Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.835047 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7vlrn" event={"ID":"6f2ac2cd-d336-4cd8-b381-aa7188c66724","Type":"ContainerStarted","Data":"ac75e4d88c6f325fe89f2f2322280dca678553a071c8e13b4e8a309f30d8d211"} Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.838682 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4n4lf" event={"ID":"509c87f8-fee0-4a27-ad42-91629218a636","Type":"ContainerStarted","Data":"1e063e58e8056843d23d3cce09fd405c30b78db53acd91021026019c84d940a6"} Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.838722 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4n4lf" event={"ID":"509c87f8-fee0-4a27-ad42-91629218a636","Type":"ContainerStarted","Data":"5a58954597eafc012f677075b09d3db83d4921e085f0b7c9f561b4acca9dbdc2"} Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.844256 4840 generic.go:334] "Generic (PLEG): container finished" podID="7e6f2acf-f874-4abd-8343-0376ca5052df" containerID="cd76f56a6bb03700b8aab84d151ab10631ff315830b62d3df970ac8efa96b324" exitCode=0 Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.844344 
4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" event={"ID":"7e6f2acf-f874-4abd-8343-0376ca5052df","Type":"ContainerDied","Data":"cd76f56a6bb03700b8aab84d151ab10631ff315830b62d3df970ac8efa96b324"} Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.844374 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" event={"ID":"7e6f2acf-f874-4abd-8343-0376ca5052df","Type":"ContainerDied","Data":"d402eee659b9b7cdd2f7e1c30a68d7ca84c65dd643d42c6223e4809d58288fa5"} Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.844421 4840 scope.go:117] "RemoveContainer" containerID="cd76f56a6bb03700b8aab84d151ab10631ff315830b62d3df970ac8efa96b324" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.844624 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-895cf5cf-g8jn9" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.849482 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-dfgrm" event={"ID":"dab20e36-d38f-4c5f-9d42-028c9df5ca51","Type":"ContainerStarted","Data":"ab0e1190c85519f927dcda003f61da4aaff3fac944c4f43b264a510f53c0802f"} Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.863847 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-5ccg9" event={"ID":"5731646f-d8c6-4bfd-b815-3d68c244d801","Type":"ContainerStarted","Data":"cfc1e130db365a8bab56b07781f2885c5dac04ebb6e6613324f3e4d663e9a6eb"} Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.867507 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-7vlrn" podStartSLOduration=3.867484619 podStartE2EDuration="3.867484619s" podCreationTimestamp="2025-12-05 15:17:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:17:42.852237108 +0000 UTC m=+1141.193299722" watchObservedRunningTime="2025-12-05 15:17:42.867484619 +0000 UTC m=+1141.208547223" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.890477 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-4n4lf" podStartSLOduration=2.89045863 podStartE2EDuration="2.89045863s" podCreationTimestamp="2025-12-05 15:17:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:17:42.888302609 +0000 UTC m=+1141.229365223" watchObservedRunningTime="2025-12-05 15:17:42.89045863 +0000 UTC m=+1141.231521244" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.898619 4840 scope.go:117] "RemoveContainer" containerID="cac958a7240a0d83b0aad8397e2bdf65e4c0a0b8b43ee8dac34d5b7b3abf780f" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.920292 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-g8jn9"] Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:42.931676 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-g8jn9"] Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.005306 4840 scope.go:117] "RemoveContainer" containerID="cd76f56a6bb03700b8aab84d151ab10631ff315830b62d3df970ac8efa96b324" Dec 05 15:17:43 crc kubenswrapper[4840]: E1205 15:17:43.077982 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"cd76f56a6bb03700b8aab84d151ab10631ff315830b62d3df970ac8efa96b324\": container with ID starting with cd76f56a6bb03700b8aab84d151ab10631ff315830b62d3df970ac8efa96b324 not found: ID does not exist" containerID="cd76f56a6bb03700b8aab84d151ab10631ff315830b62d3df970ac8efa96b324" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.078021 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd76f56a6bb03700b8aab84d151ab10631ff315830b62d3df970ac8efa96b324"} err="failed to get container status \"cd76f56a6bb03700b8aab84d151ab10631ff315830b62d3df970ac8efa96b324\": rpc error: code = NotFound desc = could not find container \"cd76f56a6bb03700b8aab84d151ab10631ff315830b62d3df970ac8efa96b324\": container with ID starting with cd76f56a6bb03700b8aab84d151ab10631ff315830b62d3df970ac8efa96b324 not found: ID does not exist" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.078048 4840 scope.go:117] "RemoveContainer" containerID="cac958a7240a0d83b0aad8397e2bdf65e4c0a0b8b43ee8dac34d5b7b3abf780f" Dec 05 15:17:43 crc kubenswrapper[4840]: E1205 15:17:43.078651 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cac958a7240a0d83b0aad8397e2bdf65e4c0a0b8b43ee8dac34d5b7b3abf780f\": container with ID starting with cac958a7240a0d83b0aad8397e2bdf65e4c0a0b8b43ee8dac34d5b7b3abf780f not found: ID does not exist" containerID="cac958a7240a0d83b0aad8397e2bdf65e4c0a0b8b43ee8dac34d5b7b3abf780f" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.078717 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cac958a7240a0d83b0aad8397e2bdf65e4c0a0b8b43ee8dac34d5b7b3abf780f"} err="failed to get container status \"cac958a7240a0d83b0aad8397e2bdf65e4c0a0b8b43ee8dac34d5b7b3abf780f\": rpc error: code = NotFound desc = could not find container \"cac958a7240a0d83b0aad8397e2bdf65e4c0a0b8b43ee8dac34d5b7b3abf780f\": container with ID starting with cac958a7240a0d83b0aad8397e2bdf65e4c0a0b8b43ee8dac34d5b7b3abf780f not found: ID does not exist" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.146891 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.185586 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7ff68744f9-zt2wd"] Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.249510 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7bd49c98bf-w2wph"] Dec 05 15:17:43 crc kubenswrapper[4840]: E1205 15:17:43.249924 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e6f2acf-f874-4abd-8343-0376ca5052df" containerName="init" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.249935 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e6f2acf-f874-4abd-8343-0376ca5052df" containerName="init" Dec 05 15:17:43 crc kubenswrapper[4840]: E1205 15:17:43.249949 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e6f2acf-f874-4abd-8343-0376ca5052df" containerName="dnsmasq-dns" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.249955 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e6f2acf-f874-4abd-8343-0376ca5052df" containerName="dnsmasq-dns" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.250132 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e6f2acf-f874-4abd-8343-0376ca5052df" containerName="dnsmasq-dns" Dec 
05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.251038 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7bd49c98bf-w2wph" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.283801 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7bd49c98bf-w2wph"] Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.306882 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.355691 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.404051 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9f5fb305-4844-4628-a5d9-ed4115133a29-config-data\") pod \"horizon-7bd49c98bf-w2wph\" (UID: \"9f5fb305-4844-4628-a5d9-ed4115133a29\") " pod="openstack/horizon-7bd49c98bf-w2wph" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.404130 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7s7n\" (UniqueName: \"kubernetes.io/projected/9f5fb305-4844-4628-a5d9-ed4115133a29-kube-api-access-r7s7n\") pod \"horizon-7bd49c98bf-w2wph\" (UID: \"9f5fb305-4844-4628-a5d9-ed4115133a29\") " pod="openstack/horizon-7bd49c98bf-w2wph" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.404174 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9f5fb305-4844-4628-a5d9-ed4115133a29-horizon-secret-key\") pod \"horizon-7bd49c98bf-w2wph\" (UID: \"9f5fb305-4844-4628-a5d9-ed4115133a29\") " pod="openstack/horizon-7bd49c98bf-w2wph" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.404213 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9f5fb305-4844-4628-a5d9-ed4115133a29-scripts\") pod \"horizon-7bd49c98bf-w2wph\" (UID: \"9f5fb305-4844-4628-a5d9-ed4115133a29\") " pod="openstack/horizon-7bd49c98bf-w2wph" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.404291 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f5fb305-4844-4628-a5d9-ed4115133a29-logs\") pod \"horizon-7bd49c98bf-w2wph\" (UID: \"9f5fb305-4844-4628-a5d9-ed4115133a29\") " pod="openstack/horizon-7bd49c98bf-w2wph" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.506143 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f5fb305-4844-4628-a5d9-ed4115133a29-logs\") pod \"horizon-7bd49c98bf-w2wph\" (UID: \"9f5fb305-4844-4628-a5d9-ed4115133a29\") " pod="openstack/horizon-7bd49c98bf-w2wph" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.506248 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9f5fb305-4844-4628-a5d9-ed4115133a29-config-data\") pod \"horizon-7bd49c98bf-w2wph\" (UID: \"9f5fb305-4844-4628-a5d9-ed4115133a29\") " pod="openstack/horizon-7bd49c98bf-w2wph" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.506283 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7s7n\" (UniqueName: 
\"kubernetes.io/projected/9f5fb305-4844-4628-a5d9-ed4115133a29-kube-api-access-r7s7n\") pod \"horizon-7bd49c98bf-w2wph\" (UID: \"9f5fb305-4844-4628-a5d9-ed4115133a29\") " pod="openstack/horizon-7bd49c98bf-w2wph" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.506306 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9f5fb305-4844-4628-a5d9-ed4115133a29-horizon-secret-key\") pod \"horizon-7bd49c98bf-w2wph\" (UID: \"9f5fb305-4844-4628-a5d9-ed4115133a29\") " pod="openstack/horizon-7bd49c98bf-w2wph" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.506337 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9f5fb305-4844-4628-a5d9-ed4115133a29-scripts\") pod \"horizon-7bd49c98bf-w2wph\" (UID: \"9f5fb305-4844-4628-a5d9-ed4115133a29\") " pod="openstack/horizon-7bd49c98bf-w2wph" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.507084 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9f5fb305-4844-4628-a5d9-ed4115133a29-scripts\") pod \"horizon-7bd49c98bf-w2wph\" (UID: \"9f5fb305-4844-4628-a5d9-ed4115133a29\") " pod="openstack/horizon-7bd49c98bf-w2wph" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.507350 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f5fb305-4844-4628-a5d9-ed4115133a29-logs\") pod \"horizon-7bd49c98bf-w2wph\" (UID: \"9f5fb305-4844-4628-a5d9-ed4115133a29\") " pod="openstack/horizon-7bd49c98bf-w2wph" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.508203 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9f5fb305-4844-4628-a5d9-ed4115133a29-config-data\") pod \"horizon-7bd49c98bf-w2wph\" (UID: \"9f5fb305-4844-4628-a5d9-ed4115133a29\") " pod="openstack/horizon-7bd49c98bf-w2wph" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.533891 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7s7n\" (UniqueName: \"kubernetes.io/projected/9f5fb305-4844-4628-a5d9-ed4115133a29-kube-api-access-r7s7n\") pod \"horizon-7bd49c98bf-w2wph\" (UID: \"9f5fb305-4844-4628-a5d9-ed4115133a29\") " pod="openstack/horizon-7bd49c98bf-w2wph" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.534162 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9f5fb305-4844-4628-a5d9-ed4115133a29-horizon-secret-key\") pod \"horizon-7bd49c98bf-w2wph\" (UID: \"9f5fb305-4844-4628-a5d9-ed4115133a29\") " pod="openstack/horizon-7bd49c98bf-w2wph" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.578818 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7bd49c98bf-w2wph" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.693680 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c9c9f998c-l6zqr" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.821394 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-ovsdbserver-sb\") pod \"82c89454-681f-4632-b172-3dc98a6425dc\" (UID: \"82c89454-681f-4632-b172-3dc98a6425dc\") " Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.821518 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-ovsdbserver-nb\") pod \"82c89454-681f-4632-b172-3dc98a6425dc\" (UID: \"82c89454-681f-4632-b172-3dc98a6425dc\") " Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.821554 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-dns-swift-storage-0\") pod \"82c89454-681f-4632-b172-3dc98a6425dc\" (UID: \"82c89454-681f-4632-b172-3dc98a6425dc\") " Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.821652 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-dns-svc\") pod \"82c89454-681f-4632-b172-3dc98a6425dc\" (UID: \"82c89454-681f-4632-b172-3dc98a6425dc\") " Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.821673 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-config\") pod \"82c89454-681f-4632-b172-3dc98a6425dc\" (UID: \"82c89454-681f-4632-b172-3dc98a6425dc\") " Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.821707 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p4pvt\" (UniqueName: \"kubernetes.io/projected/82c89454-681f-4632-b172-3dc98a6425dc-kube-api-access-p4pvt\") pod \"82c89454-681f-4632-b172-3dc98a6425dc\" (UID: \"82c89454-681f-4632-b172-3dc98a6425dc\") " Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.828987 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82c89454-681f-4632-b172-3dc98a6425dc-kube-api-access-p4pvt" (OuterVolumeSpecName: "kube-api-access-p4pvt") pod "82c89454-681f-4632-b172-3dc98a6425dc" (UID: "82c89454-681f-4632-b172-3dc98a6425dc"). InnerVolumeSpecName "kube-api-access-p4pvt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.854691 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "82c89454-681f-4632-b172-3dc98a6425dc" (UID: "82c89454-681f-4632-b172-3dc98a6425dc"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.876559 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-config" (OuterVolumeSpecName: "config") pod "82c89454-681f-4632-b172-3dc98a6425dc" (UID: "82c89454-681f-4632-b172-3dc98a6425dc"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.876836 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "82c89454-681f-4632-b172-3dc98a6425dc" (UID: "82c89454-681f-4632-b172-3dc98a6425dc"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.891751 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c9c9f998c-l6zqr" event={"ID":"82c89454-681f-4632-b172-3dc98a6425dc","Type":"ContainerDied","Data":"5583e1fae8e84781d5a8aac637eb03f3869daee0c662769865a80258adb6e787"} Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.892206 4840 scope.go:117] "RemoveContainer" containerID="7dd43fbf22f46bd57f1e1b8d9bdbcd074e9967ccd5b97b43b7c241fb8acdd095" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.892054 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c9c9f998c-l6zqr" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.896211 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "82c89454-681f-4632-b172-3dc98a6425dc" (UID: "82c89454-681f-4632-b172-3dc98a6425dc"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.904247 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "82c89454-681f-4632-b172-3dc98a6425dc" (UID: "82c89454-681f-4632-b172-3dc98a6425dc"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.925341 4840 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.925432 4840 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.925447 4840 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.925462 4840 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.925476 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82c89454-681f-4632-b172-3dc98a6425dc-config\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:43 crc kubenswrapper[4840]: I1205 15:17:43.925488 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p4pvt\" (UniqueName: \"kubernetes.io/projected/82c89454-681f-4632-b172-3dc98a6425dc-kube-api-access-p4pvt\") on node \"crc\" DevicePath \"\"" Dec 05 15:17:44 crc kubenswrapper[4840]: I1205 15:17:44.177824 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" podStartSLOduration=4.177795968 podStartE2EDuration="4.177795968s" podCreationTimestamp="2025-12-05 15:17:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:17:44.174819854 +0000 UTC m=+1142.515882468" watchObservedRunningTime="2025-12-05 15:17:44.177795968 +0000 UTC m=+1142.518858582" Dec 05 15:17:44 crc kubenswrapper[4840]: I1205 15:17:44.180538 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e6f2acf-f874-4abd-8343-0376ca5052df" path="/var/lib/kubelet/pods/7e6f2acf-f874-4abd-8343-0376ca5052df/volumes" Dec 05 15:17:44 crc kubenswrapper[4840]: I1205 15:17:44.184515 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" Dec 05 15:17:44 crc kubenswrapper[4840]: I1205 15:17:44.186293 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" event={"ID":"7c611c21-c0e0-4af2-a5a7-195807a6fcfa","Type":"ContainerStarted","Data":"1e626ca15f78583df8491eb4b1e3e042fa53b4bdbe9a9e625733b1b9bd2b0168"} Dec 05 15:17:44 crc kubenswrapper[4840]: I1205 15:17:44.358775 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-l6zqr"] Dec 05 15:17:44 crc kubenswrapper[4840]: I1205 15:17:44.382667 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-l6zqr"] Dec 05 15:17:44 crc kubenswrapper[4840]: I1205 15:17:44.544572 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7bd49c98bf-w2wph"] Dec 05 15:17:44 crc kubenswrapper[4840]: W1205 15:17:44.546065 4840 manager.go:1169] Failed to 
process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9f5fb305_4844_4628_a5d9_ed4115133a29.slice/crio-ba9952dd0f91cad05b5a6b439d028d5dc6657851e6f755ec670ce783fc702ed1 WatchSource:0}: Error finding container ba9952dd0f91cad05b5a6b439d028d5dc6657851e6f755ec670ce783fc702ed1: Status 404 returned error can't find the container with id ba9952dd0f91cad05b5a6b439d028d5dc6657851e6f755ec670ce783fc702ed1 Dec 05 15:17:45 crc kubenswrapper[4840]: I1205 15:17:45.167910 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7bd49c98bf-w2wph" event={"ID":"9f5fb305-4844-4628-a5d9-ed4115133a29","Type":"ContainerStarted","Data":"ba9952dd0f91cad05b5a6b439d028d5dc6657851e6f755ec670ce783fc702ed1"} Dec 05 15:17:46 crc kubenswrapper[4840]: I1205 15:17:46.178746 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82c89454-681f-4632-b172-3dc98a6425dc" path="/var/lib/kubelet/pods/82c89454-681f-4632-b172-3dc98a6425dc/volumes" Dec 05 15:17:46 crc kubenswrapper[4840]: I1205 15:17:46.189723 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"15e31586-1918-4aca-b3cc-eb2e2e6696d5","Type":"ContainerStarted","Data":"56bbb72ebdbd7200e23113548fe03f37091594b9e8a497428b7f84a2f03afec4"} Dec 05 15:17:46 crc kubenswrapper[4840]: I1205 15:17:46.191705 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6","Type":"ContainerStarted","Data":"1aea1aa0b08d267942f211593146a9caedafc50a42884b731963033ed9a43f45"} Dec 05 15:17:48 crc kubenswrapper[4840]: I1205 15:17:48.500946 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"15e31586-1918-4aca-b3cc-eb2e2e6696d5","Type":"ContainerStarted","Data":"619117a4d30ec0de772a2813f293735076515512351f1d69c8cfca5655056a1d"} Dec 05 15:17:48 crc kubenswrapper[4840]: I1205 15:17:48.501304 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="15e31586-1918-4aca-b3cc-eb2e2e6696d5" containerName="glance-log" containerID="cri-o://56bbb72ebdbd7200e23113548fe03f37091594b9e8a497428b7f84a2f03afec4" gracePeriod=30 Dec 05 15:17:48 crc kubenswrapper[4840]: I1205 15:17:48.501534 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="15e31586-1918-4aca-b3cc-eb2e2e6696d5" containerName="glance-httpd" containerID="cri-o://619117a4d30ec0de772a2813f293735076515512351f1d69c8cfca5655056a1d" gracePeriod=30 Dec 05 15:17:49 crc kubenswrapper[4840]: I1205 15:17:49.034063 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6","Type":"ContainerStarted","Data":"16a4ed2f0d915b6604fb95d8188ec9107b859bf66ea7c94959c3d34ee0f6cf61"} Dec 05 15:17:49 crc kubenswrapper[4840]: I1205 15:17:49.034277 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="c6480440-9c3d-4541-b5f1-baaeb1e4d6f6" containerName="glance-log" containerID="cri-o://1aea1aa0b08d267942f211593146a9caedafc50a42884b731963033ed9a43f45" gracePeriod=30 Dec 05 15:17:49 crc kubenswrapper[4840]: I1205 15:17:49.034479 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" 
podUID="c6480440-9c3d-4541-b5f1-baaeb1e4d6f6" containerName="glance-httpd" containerID="cri-o://16a4ed2f0d915b6604fb95d8188ec9107b859bf66ea7c94959c3d34ee0f6cf61" gracePeriod=30 Dec 05 15:17:49 crc kubenswrapper[4840]: I1205 15:17:49.068031 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=9.065856243 podStartE2EDuration="9.065856243s" podCreationTimestamp="2025-12-05 15:17:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:17:49.036017398 +0000 UTC m=+1147.377080012" watchObservedRunningTime="2025-12-05 15:17:49.065856243 +0000 UTC m=+1147.406918847" Dec 05 15:17:49 crc kubenswrapper[4840]: I1205 15:17:49.094588 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=9.094573776 podStartE2EDuration="9.094573776s" podCreationTimestamp="2025-12-05 15:17:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:17:49.094315699 +0000 UTC m=+1147.435378313" watchObservedRunningTime="2025-12-05 15:17:49.094573776 +0000 UTC m=+1147.435636390" Dec 05 15:17:50 crc kubenswrapper[4840]: I1205 15:17:50.414471 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:17:50 crc kubenswrapper[4840]: I1205 15:17:50.414651 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:17:50 crc kubenswrapper[4840]: I1205 15:17:50.484214 4840 generic.go:334] "Generic (PLEG): container finished" podID="15e31586-1918-4aca-b3cc-eb2e2e6696d5" containerID="56bbb72ebdbd7200e23113548fe03f37091594b9e8a497428b7f84a2f03afec4" exitCode=143 Dec 05 15:17:50 crc kubenswrapper[4840]: I1205 15:17:50.491906 4840 generic.go:334] "Generic (PLEG): container finished" podID="c6480440-9c3d-4541-b5f1-baaeb1e4d6f6" containerID="16a4ed2f0d915b6604fb95d8188ec9107b859bf66ea7c94959c3d34ee0f6cf61" exitCode=0 Dec 05 15:17:50 crc kubenswrapper[4840]: I1205 15:17:50.491934 4840 generic.go:334] "Generic (PLEG): container finished" podID="c6480440-9c3d-4541-b5f1-baaeb1e4d6f6" containerID="1aea1aa0b08d267942f211593146a9caedafc50a42884b731963033ed9a43f45" exitCode=143 Dec 05 15:17:50 crc kubenswrapper[4840]: I1205 15:17:50.602556 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"15e31586-1918-4aca-b3cc-eb2e2e6696d5","Type":"ContainerDied","Data":"56bbb72ebdbd7200e23113548fe03f37091594b9e8a497428b7f84a2f03afec4"} Dec 05 15:17:50 crc kubenswrapper[4840]: I1205 15:17:50.602599 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6","Type":"ContainerDied","Data":"16a4ed2f0d915b6604fb95d8188ec9107b859bf66ea7c94959c3d34ee0f6cf61"} Dec 05 15:17:50 crc kubenswrapper[4840]: I1205 15:17:50.602618 4840 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6","Type":"ContainerDied","Data":"1aea1aa0b08d267942f211593146a9caedafc50a42884b731963033ed9a43f45"} Dec 05 15:17:50 crc kubenswrapper[4840]: E1205 15:17:50.654410 4840 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc6480440_9c3d_4541_b5f1_baaeb1e4d6f6.slice/crio-16a4ed2f0d915b6604fb95d8188ec9107b859bf66ea7c94959c3d34ee0f6cf61.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod15e31586_1918_4aca_b3cc_eb2e2e6696d5.slice/crio-56bbb72ebdbd7200e23113548fe03f37091594b9e8a497428b7f84a2f03afec4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc6480440_9c3d_4541_b5f1_baaeb1e4d6f6.slice/crio-conmon-1aea1aa0b08d267942f211593146a9caedafc50a42884b731963033ed9a43f45.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc6480440_9c3d_4541_b5f1_baaeb1e4d6f6.slice/crio-1aea1aa0b08d267942f211593146a9caedafc50a42884b731963033ed9a43f45.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod15e31586_1918_4aca_b3cc_eb2e2e6696d5.slice/crio-conmon-56bbb72ebdbd7200e23113548fe03f37091594b9e8a497428b7f84a2f03afec4.scope\": RecentStats: unable to find data in memory cache]" Dec 05 15:17:51 crc kubenswrapper[4840]: I1205 15:17:51.006988 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" Dec 05 15:17:51 crc kubenswrapper[4840]: I1205 15:17:51.091717 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-5n2bl"] Dec 05 15:17:51 crc kubenswrapper[4840]: I1205 15:17:51.091972 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" podUID="9ead46a4-d789-4a22-9332-ed2c4f706010" containerName="dnsmasq-dns" containerID="cri-o://1ce06db264cce1f0192fc6f568105da48de1b8a6728582327e6c7e9d6134f2be" gracePeriod=10 Dec 05 15:17:51 crc kubenswrapper[4840]: I1205 15:17:51.508857 4840 generic.go:334] "Generic (PLEG): container finished" podID="15e31586-1918-4aca-b3cc-eb2e2e6696d5" containerID="619117a4d30ec0de772a2813f293735076515512351f1d69c8cfca5655056a1d" exitCode=0 Dec 05 15:17:51 crc kubenswrapper[4840]: I1205 15:17:51.509296 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"15e31586-1918-4aca-b3cc-eb2e2e6696d5","Type":"ContainerDied","Data":"619117a4d30ec0de772a2813f293735076515512351f1d69c8cfca5655056a1d"} Dec 05 15:17:51 crc kubenswrapper[4840]: I1205 15:17:51.512259 4840 generic.go:334] "Generic (PLEG): container finished" podID="9ead46a4-d789-4a22-9332-ed2c4f706010" containerID="1ce06db264cce1f0192fc6f568105da48de1b8a6728582327e6c7e9d6134f2be" exitCode=0 Dec 05 15:17:51 crc kubenswrapper[4840]: I1205 15:17:51.512294 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" event={"ID":"9ead46a4-d789-4a22-9332-ed2c4f706010","Type":"ContainerDied","Data":"1ce06db264cce1f0192fc6f568105da48de1b8a6728582327e6c7e9d6134f2be"} Dec 05 15:17:51 crc kubenswrapper[4840]: I1205 15:17:51.736622 4840 prober.go:107] "Probe 
failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" podUID="9ead46a4-d789-4a22-9332-ed2c4f706010" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.127:5353: connect: connection refused" Dec 05 15:17:51 crc kubenswrapper[4840]: I1205 15:17:51.924400 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-755cc8bd6f-hrt72"] Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.085111 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-67c65cc6bd-s49k5"] Dec 05 15:17:52 crc kubenswrapper[4840]: E1205 15:17:52.089520 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82c89454-681f-4632-b172-3dc98a6425dc" containerName="init" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.089563 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="82c89454-681f-4632-b172-3dc98a6425dc" containerName="init" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.089839 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="82c89454-681f-4632-b172-3dc98a6425dc" containerName="init" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.091041 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.091847 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-67c65cc6bd-s49k5"] Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.095598 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.121700 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7bd49c98bf-w2wph"] Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.163734 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-scripts\") pod \"horizon-67c65cc6bd-s49k5\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.163811 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-horizon-tls-certs\") pod \"horizon-67c65cc6bd-s49k5\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.163918 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-config-data\") pod \"horizon-67c65cc6bd-s49k5\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.163996 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-combined-ca-bundle\") pod \"horizon-67c65cc6bd-s49k5\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.168846 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" 
(UniqueName: \"kubernetes.io/secret/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-horizon-secret-key\") pod \"horizon-67c65cc6bd-s49k5\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.168955 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmrwd\" (UniqueName: \"kubernetes.io/projected/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-kube-api-access-nmrwd\") pod \"horizon-67c65cc6bd-s49k5\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.169037 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-logs\") pod \"horizon-67c65cc6bd-s49k5\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.196532 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7b99464548-lx7k9"] Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.199798 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.227644 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7b99464548-lx7k9"] Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.271402 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d384faa1-3b3b-45f8-bf4b-902236ec40da-horizon-secret-key\") pod \"horizon-7b99464548-lx7k9\" (UID: \"d384faa1-3b3b-45f8-bf4b-902236ec40da\") " pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.271497 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4jmw\" (UniqueName: \"kubernetes.io/projected/d384faa1-3b3b-45f8-bf4b-902236ec40da-kube-api-access-r4jmw\") pod \"horizon-7b99464548-lx7k9\" (UID: \"d384faa1-3b3b-45f8-bf4b-902236ec40da\") " pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.271680 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-horizon-secret-key\") pod \"horizon-67c65cc6bd-s49k5\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.271769 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmrwd\" (UniqueName: \"kubernetes.io/projected/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-kube-api-access-nmrwd\") pod \"horizon-67c65cc6bd-s49k5\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.271845 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-logs\") pod \"horizon-67c65cc6bd-s49k5\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.271991 4840 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-scripts\") pod \"horizon-67c65cc6bd-s49k5\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.272026 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-horizon-tls-certs\") pod \"horizon-67c65cc6bd-s49k5\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.272049 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d384faa1-3b3b-45f8-bf4b-902236ec40da-scripts\") pod \"horizon-7b99464548-lx7k9\" (UID: \"d384faa1-3b3b-45f8-bf4b-902236ec40da\") " pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.272140 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-config-data\") pod \"horizon-67c65cc6bd-s49k5\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.272172 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d384faa1-3b3b-45f8-bf4b-902236ec40da-config-data\") pod \"horizon-7b99464548-lx7k9\" (UID: \"d384faa1-3b3b-45f8-bf4b-902236ec40da\") " pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.272216 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-combined-ca-bundle\") pod \"horizon-67c65cc6bd-s49k5\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.272236 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/d384faa1-3b3b-45f8-bf4b-902236ec40da-horizon-tls-certs\") pod \"horizon-7b99464548-lx7k9\" (UID: \"d384faa1-3b3b-45f8-bf4b-902236ec40da\") " pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.272352 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d384faa1-3b3b-45f8-bf4b-902236ec40da-combined-ca-bundle\") pod \"horizon-7b99464548-lx7k9\" (UID: \"d384faa1-3b3b-45f8-bf4b-902236ec40da\") " pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.272413 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d384faa1-3b3b-45f8-bf4b-902236ec40da-logs\") pod \"horizon-7b99464548-lx7k9\" (UID: \"d384faa1-3b3b-45f8-bf4b-902236ec40da\") " pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.273609 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"logs\" (UniqueName: \"kubernetes.io/empty-dir/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-logs\") pod \"horizon-67c65cc6bd-s49k5\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.273818 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-scripts\") pod \"horizon-67c65cc6bd-s49k5\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.274812 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-config-data\") pod \"horizon-67c65cc6bd-s49k5\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.281039 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-horizon-secret-key\") pod \"horizon-67c65cc6bd-s49k5\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.283174 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-combined-ca-bundle\") pod \"horizon-67c65cc6bd-s49k5\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.283410 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-horizon-tls-certs\") pod \"horizon-67c65cc6bd-s49k5\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.304471 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmrwd\" (UniqueName: \"kubernetes.io/projected/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-kube-api-access-nmrwd\") pod \"horizon-67c65cc6bd-s49k5\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.379824 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d384faa1-3b3b-45f8-bf4b-902236ec40da-scripts\") pod \"horizon-7b99464548-lx7k9\" (UID: \"d384faa1-3b3b-45f8-bf4b-902236ec40da\") " pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.379931 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d384faa1-3b3b-45f8-bf4b-902236ec40da-config-data\") pod \"horizon-7b99464548-lx7k9\" (UID: \"d384faa1-3b3b-45f8-bf4b-902236ec40da\") " pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.379967 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/d384faa1-3b3b-45f8-bf4b-902236ec40da-horizon-tls-certs\") pod \"horizon-7b99464548-lx7k9\" (UID: \"d384faa1-3b3b-45f8-bf4b-902236ec40da\") " 
pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.379992 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d384faa1-3b3b-45f8-bf4b-902236ec40da-combined-ca-bundle\") pod \"horizon-7b99464548-lx7k9\" (UID: \"d384faa1-3b3b-45f8-bf4b-902236ec40da\") " pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.380025 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d384faa1-3b3b-45f8-bf4b-902236ec40da-logs\") pod \"horizon-7b99464548-lx7k9\" (UID: \"d384faa1-3b3b-45f8-bf4b-902236ec40da\") " pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.380059 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d384faa1-3b3b-45f8-bf4b-902236ec40da-horizon-secret-key\") pod \"horizon-7b99464548-lx7k9\" (UID: \"d384faa1-3b3b-45f8-bf4b-902236ec40da\") " pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.380102 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4jmw\" (UniqueName: \"kubernetes.io/projected/d384faa1-3b3b-45f8-bf4b-902236ec40da-kube-api-access-r4jmw\") pod \"horizon-7b99464548-lx7k9\" (UID: \"d384faa1-3b3b-45f8-bf4b-902236ec40da\") " pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.381752 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d384faa1-3b3b-45f8-bf4b-902236ec40da-scripts\") pod \"horizon-7b99464548-lx7k9\" (UID: \"d384faa1-3b3b-45f8-bf4b-902236ec40da\") " pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.382802 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d384faa1-3b3b-45f8-bf4b-902236ec40da-logs\") pod \"horizon-7b99464548-lx7k9\" (UID: \"d384faa1-3b3b-45f8-bf4b-902236ec40da\") " pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.383569 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d384faa1-3b3b-45f8-bf4b-902236ec40da-config-data\") pod \"horizon-7b99464548-lx7k9\" (UID: \"d384faa1-3b3b-45f8-bf4b-902236ec40da\") " pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.387224 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d384faa1-3b3b-45f8-bf4b-902236ec40da-horizon-secret-key\") pod \"horizon-7b99464548-lx7k9\" (UID: \"d384faa1-3b3b-45f8-bf4b-902236ec40da\") " pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.389118 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/d384faa1-3b3b-45f8-bf4b-902236ec40da-horizon-tls-certs\") pod \"horizon-7b99464548-lx7k9\" (UID: \"d384faa1-3b3b-45f8-bf4b-902236ec40da\") " pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.390122 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d384faa1-3b3b-45f8-bf4b-902236ec40da-combined-ca-bundle\") pod \"horizon-7b99464548-lx7k9\" (UID: \"d384faa1-3b3b-45f8-bf4b-902236ec40da\") " pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.407611 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4jmw\" (UniqueName: \"kubernetes.io/projected/d384faa1-3b3b-45f8-bf4b-902236ec40da-kube-api-access-r4jmw\") pod \"horizon-7b99464548-lx7k9\" (UID: \"d384faa1-3b3b-45f8-bf4b-902236ec40da\") " pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.432822 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.521169 4840 generic.go:334] "Generic (PLEG): container finished" podID="6f2ac2cd-d336-4cd8-b381-aa7188c66724" containerID="ac75e4d88c6f325fe89f2f2322280dca678553a071c8e13b4e8a309f30d8d211" exitCode=0 Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.521242 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7vlrn" event={"ID":"6f2ac2cd-d336-4cd8-b381-aa7188c66724","Type":"ContainerDied","Data":"ac75e4d88c6f325fe89f2f2322280dca678553a071c8e13b4e8a309f30d8d211"} Dec 05 15:17:52 crc kubenswrapper[4840]: I1205 15:17:52.538454 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:17:56 crc kubenswrapper[4840]: I1205 15:17:56.736665 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" podUID="9ead46a4-d789-4a22-9332-ed2c4f706010" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.127:5353: connect: connection refused" Dec 05 15:18:01 crc kubenswrapper[4840]: I1205 15:18:01.736642 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" podUID="9ead46a4-d789-4a22-9332-ed2c4f706010" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.127:5353: connect: connection refused" Dec 05 15:18:01 crc kubenswrapper[4840]: I1205 15:18:01.737260 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:18:10 crc kubenswrapper[4840]: E1205 15:18:10.720676 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 05 15:18:10 crc kubenswrapper[4840]: E1205 15:18:10.721550 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nc9h5d8h5c9h56bh59bh78h6ch54bh647h548h5f8hb6h5c6hd4h5b4h8dhdbh679h54ch5bfh664h649h5d7h67ch66dh59h545hd7hbfh696h5d6h587q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qlw2t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-755cc8bd6f-hrt72_openstack(bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:18:10 crc kubenswrapper[4840]: E1205 15:18:10.723821 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-755cc8bd6f-hrt72" podUID="bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe" Dec 05 15:18:10 crc kubenswrapper[4840]: E1205 15:18:10.730049 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 05 15:18:10 crc kubenswrapper[4840]: E1205 15:18:10.731646 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n8fh579hb4hbh68fhbbh67bh84hch77h5fdhb4hdbh56fh94h5fdh669h544h657h555h558h644h85h59h77h7dh7fh95h58ch5bfh577h5q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-r7s7n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-7bd49c98bf-w2wph_openstack(9f5fb305-4844-4628-a5d9-ed4115133a29): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:18:10 crc kubenswrapper[4840]: E1205 15:18:10.734719 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-7bd49c98bf-w2wph" podUID="9f5fb305-4844-4628-a5d9-ed4115133a29" Dec 05 15:18:10 crc kubenswrapper[4840]: I1205 15:18:10.803634 4840 generic.go:334] "Generic (PLEG): container finished" podID="509c87f8-fee0-4a27-ad42-91629218a636" containerID="1e063e58e8056843d23d3cce09fd405c30b78db53acd91021026019c84d940a6" exitCode=0 Dec 05 15:18:10 crc kubenswrapper[4840]: I1205 15:18:10.803720 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4n4lf" event={"ID":"509c87f8-fee0-4a27-ad42-91629218a636","Type":"ContainerDied","Data":"1e063e58e8056843d23d3cce09fd405c30b78db53acd91021026019c84d940a6"} Dec 05 15:18:11 crc kubenswrapper[4840]: I1205 15:18:11.148216 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 15:18:11 crc kubenswrapper[4840]: I1205 15:18:11.148259 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 15:18:11 crc kubenswrapper[4840]: I1205 15:18:11.225768 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/glance-default-internal-api-0" Dec 05 15:18:11 crc kubenswrapper[4840]: I1205 15:18:11.225848 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 15:18:11 crc kubenswrapper[4840]: I1205 15:18:11.736353 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" podUID="9ead46a4-d789-4a22-9332-ed2c4f706010" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.127:5353: i/o timeout" Dec 05 15:18:12 crc kubenswrapper[4840]: E1205 15:18:12.545236 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 05 15:18:12 crc kubenswrapper[4840]: E1205 15:18:12.545690 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nfbh695h67fh64ch59ch7ch658h579h5dch698h67ch59h677h5dbhfh5d9h7h5d8hf6h589h8h5d5hcbh66ch87hf5hb7h58dhb8h678h6bh8bq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5pr2g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-7ff68744f9-zt2wd_openstack(fcd25084-75e8-447b-ba4a-2daa682a9cdb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:18:12 crc kubenswrapper[4840]: E1205 15:18:12.547852 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-7ff68744f9-zt2wd" podUID="fcd25084-75e8-447b-ba4a-2daa682a9cdb" Dec 05 
15:18:12 crc kubenswrapper[4840]: I1205 15:18:12.644152 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7vlrn" Dec 05 15:18:12 crc kubenswrapper[4840]: I1205 15:18:12.724855 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-scripts\") pod \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\" (UID: \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\") " Dec 05 15:18:12 crc kubenswrapper[4840]: I1205 15:18:12.725195 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-config-data\") pod \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\" (UID: \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\") " Dec 05 15:18:12 crc kubenswrapper[4840]: I1205 15:18:12.725455 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-credential-keys\") pod \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\" (UID: \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\") " Dec 05 15:18:12 crc kubenswrapper[4840]: I1205 15:18:12.725520 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-combined-ca-bundle\") pod \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\" (UID: \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\") " Dec 05 15:18:12 crc kubenswrapper[4840]: I1205 15:18:12.725587 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-fernet-keys\") pod \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\" (UID: \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\") " Dec 05 15:18:12 crc kubenswrapper[4840]: I1205 15:18:12.725662 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5dsk\" (UniqueName: \"kubernetes.io/projected/6f2ac2cd-d336-4cd8-b381-aa7188c66724-kube-api-access-n5dsk\") pod \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\" (UID: \"6f2ac2cd-d336-4cd8-b381-aa7188c66724\") " Dec 05 15:18:12 crc kubenswrapper[4840]: I1205 15:18:12.732565 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "6f2ac2cd-d336-4cd8-b381-aa7188c66724" (UID: "6f2ac2cd-d336-4cd8-b381-aa7188c66724"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:12 crc kubenswrapper[4840]: I1205 15:18:12.732751 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f2ac2cd-d336-4cd8-b381-aa7188c66724-kube-api-access-n5dsk" (OuterVolumeSpecName: "kube-api-access-n5dsk") pod "6f2ac2cd-d336-4cd8-b381-aa7188c66724" (UID: "6f2ac2cd-d336-4cd8-b381-aa7188c66724"). InnerVolumeSpecName "kube-api-access-n5dsk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:18:12 crc kubenswrapper[4840]: I1205 15:18:12.735055 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-scripts" (OuterVolumeSpecName: "scripts") pod "6f2ac2cd-d336-4cd8-b381-aa7188c66724" (UID: "6f2ac2cd-d336-4cd8-b381-aa7188c66724"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:12 crc kubenswrapper[4840]: I1205 15:18:12.743933 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "6f2ac2cd-d336-4cd8-b381-aa7188c66724" (UID: "6f2ac2cd-d336-4cd8-b381-aa7188c66724"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:12 crc kubenswrapper[4840]: I1205 15:18:12.751276 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6f2ac2cd-d336-4cd8-b381-aa7188c66724" (UID: "6f2ac2cd-d336-4cd8-b381-aa7188c66724"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:12 crc kubenswrapper[4840]: I1205 15:18:12.751881 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-config-data" (OuterVolumeSpecName: "config-data") pod "6f2ac2cd-d336-4cd8-b381-aa7188c66724" (UID: "6f2ac2cd-d336-4cd8-b381-aa7188c66724"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:12 crc kubenswrapper[4840]: I1205 15:18:12.821216 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7vlrn" Dec 05 15:18:12 crc kubenswrapper[4840]: I1205 15:18:12.821959 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7vlrn" event={"ID":"6f2ac2cd-d336-4cd8-b381-aa7188c66724","Type":"ContainerDied","Data":"2f1c094ad02fcdaf9a1b731af542be4e35fdd6eb3cf2955db5e4e5812dc95501"} Dec 05 15:18:12 crc kubenswrapper[4840]: I1205 15:18:12.821999 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2f1c094ad02fcdaf9a1b731af542be4e35fdd6eb3cf2955db5e4e5812dc95501" Dec 05 15:18:12 crc kubenswrapper[4840]: I1205 15:18:12.827936 4840 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:12 crc kubenswrapper[4840]: I1205 15:18:12.827962 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:12 crc kubenswrapper[4840]: I1205 15:18:12.827971 4840 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:12 crc kubenswrapper[4840]: I1205 15:18:12.827979 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5dsk\" (UniqueName: \"kubernetes.io/projected/6f2ac2cd-d336-4cd8-b381-aa7188c66724-kube-api-access-n5dsk\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:12 crc kubenswrapper[4840]: I1205 15:18:12.827991 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:12 crc kubenswrapper[4840]: I1205 15:18:12.827998 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/6f2ac2cd-d336-4cd8-b381-aa7188c66724-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:13 crc kubenswrapper[4840]: E1205 15:18:13.108376 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified" Dec 05 15:18:13 crc kubenswrapper[4840]: E1205 15:18:13.108586 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nf6h5b4hfh644h657h66ch679h559h5ffh56dh689h548h5d5h688h58fh697h597h674h569h656h57fh66dh668h8ch96hb9h6dh579h5dch668h7ch7fq,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-c82pl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(7a351148-31b1-402a-a8f5-0f26f81fddef): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:18:13 crc kubenswrapper[4840]: I1205 15:18:13.736790 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-7vlrn"] Dec 05 15:18:13 crc kubenswrapper[4840]: I1205 15:18:13.744462 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-7vlrn"] Dec 05 15:18:13 crc kubenswrapper[4840]: I1205 15:18:13.828986 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-4bjv4"] Dec 05 15:18:13 crc 
kubenswrapper[4840]: E1205 15:18:13.829482 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f2ac2cd-d336-4cd8-b381-aa7188c66724" containerName="keystone-bootstrap" Dec 05 15:18:13 crc kubenswrapper[4840]: I1205 15:18:13.829503 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f2ac2cd-d336-4cd8-b381-aa7188c66724" containerName="keystone-bootstrap" Dec 05 15:18:13 crc kubenswrapper[4840]: I1205 15:18:13.829696 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f2ac2cd-d336-4cd8-b381-aa7188c66724" containerName="keystone-bootstrap" Dec 05 15:18:13 crc kubenswrapper[4840]: I1205 15:18:13.830367 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-4bjv4" Dec 05 15:18:13 crc kubenswrapper[4840]: I1205 15:18:13.834224 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 15:18:13 crc kubenswrapper[4840]: I1205 15:18:13.834901 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 15:18:13 crc kubenswrapper[4840]: I1205 15:18:13.834939 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-6bz6r" Dec 05 15:18:13 crc kubenswrapper[4840]: I1205 15:18:13.835048 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 15:18:13 crc kubenswrapper[4840]: I1205 15:18:13.835345 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 15:18:13 crc kubenswrapper[4840]: I1205 15:18:13.837084 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-4bjv4"] Dec 05 15:18:13 crc kubenswrapper[4840]: I1205 15:18:13.952707 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-credential-keys\") pod \"keystone-bootstrap-4bjv4\" (UID: \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\") " pod="openstack/keystone-bootstrap-4bjv4" Dec 05 15:18:13 crc kubenswrapper[4840]: I1205 15:18:13.952851 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vcq8n\" (UniqueName: \"kubernetes.io/projected/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-kube-api-access-vcq8n\") pod \"keystone-bootstrap-4bjv4\" (UID: \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\") " pod="openstack/keystone-bootstrap-4bjv4" Dec 05 15:18:13 crc kubenswrapper[4840]: I1205 15:18:13.952937 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-scripts\") pod \"keystone-bootstrap-4bjv4\" (UID: \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\") " pod="openstack/keystone-bootstrap-4bjv4" Dec 05 15:18:13 crc kubenswrapper[4840]: I1205 15:18:13.953008 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-fernet-keys\") pod \"keystone-bootstrap-4bjv4\" (UID: \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\") " pod="openstack/keystone-bootstrap-4bjv4" Dec 05 15:18:13 crc kubenswrapper[4840]: I1205 15:18:13.953041 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-combined-ca-bundle\") pod \"keystone-bootstrap-4bjv4\" (UID: \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\") " pod="openstack/keystone-bootstrap-4bjv4" Dec 05 15:18:13 crc kubenswrapper[4840]: I1205 15:18:13.953133 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-config-data\") pod \"keystone-bootstrap-4bjv4\" (UID: \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\") " pod="openstack/keystone-bootstrap-4bjv4" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.054926 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vcq8n\" (UniqueName: \"kubernetes.io/projected/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-kube-api-access-vcq8n\") pod \"keystone-bootstrap-4bjv4\" (UID: \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\") " pod="openstack/keystone-bootstrap-4bjv4" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.054995 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-scripts\") pod \"keystone-bootstrap-4bjv4\" (UID: \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\") " pod="openstack/keystone-bootstrap-4bjv4" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.055062 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-fernet-keys\") pod \"keystone-bootstrap-4bjv4\" (UID: \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\") " pod="openstack/keystone-bootstrap-4bjv4" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.055081 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-combined-ca-bundle\") pod \"keystone-bootstrap-4bjv4\" (UID: \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\") " pod="openstack/keystone-bootstrap-4bjv4" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.055124 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-config-data\") pod \"keystone-bootstrap-4bjv4\" (UID: \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\") " pod="openstack/keystone-bootstrap-4bjv4" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.055243 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-credential-keys\") pod \"keystone-bootstrap-4bjv4\" (UID: \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\") " pod="openstack/keystone-bootstrap-4bjv4" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.060885 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-credential-keys\") pod \"keystone-bootstrap-4bjv4\" (UID: \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\") " pod="openstack/keystone-bootstrap-4bjv4" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.061005 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-fernet-keys\") pod \"keystone-bootstrap-4bjv4\" (UID: 
\"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\") " pod="openstack/keystone-bootstrap-4bjv4" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.067415 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-scripts\") pod \"keystone-bootstrap-4bjv4\" (UID: \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\") " pod="openstack/keystone-bootstrap-4bjv4" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.069280 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-config-data\") pod \"keystone-bootstrap-4bjv4\" (UID: \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\") " pod="openstack/keystone-bootstrap-4bjv4" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.070994 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-combined-ca-bundle\") pod \"keystone-bootstrap-4bjv4\" (UID: \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\") " pod="openstack/keystone-bootstrap-4bjv4" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.076473 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vcq8n\" (UniqueName: \"kubernetes.io/projected/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-kube-api-access-vcq8n\") pod \"keystone-bootstrap-4bjv4\" (UID: \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\") " pod="openstack/keystone-bootstrap-4bjv4" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.080261 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f2ac2cd-d336-4cd8-b381-aa7188c66724" path="/var/lib/kubelet/pods/6f2ac2cd-d336-4cd8-b381-aa7188c66724/volumes" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.150764 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-4bjv4" Dec 05 15:18:14 crc kubenswrapper[4840]: E1205 15:18:14.284509 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 05 15:18:14 crc kubenswrapper[4840]: E1205 15:18:14.284916 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xq62m,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-szwv8_openstack(ae68e2b9-f51d-4486-952d-73c097fbaac4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:18:14 crc kubenswrapper[4840]: E1205 15:18:14.286056 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-szwv8" podUID="ae68e2b9-f51d-4486-952d-73c097fbaac4" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.365937 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.376306 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.378498 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-755cc8bd6f-hrt72" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.397403 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7bd49c98bf-w2wph" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.413924 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4n4lf" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.461182 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/15e31586-1918-4aca-b3cc-eb2e2e6696d5-httpd-run\") pod \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.461250 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5fwz\" (UniqueName: \"kubernetes.io/projected/15e31586-1918-4aca-b3cc-eb2e2e6696d5-kube-api-access-k5fwz\") pod \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.461339 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15e31586-1918-4aca-b3cc-eb2e2e6696d5-config-data\") pod \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.461379 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.461441 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15e31586-1918-4aca-b3cc-eb2e2e6696d5-combined-ca-bundle\") pod \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.461516 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15e31586-1918-4aca-b3cc-eb2e2e6696d5-logs\") pod \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.461572 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15e31586-1918-4aca-b3cc-eb2e2e6696d5-scripts\") pod \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\" (UID: \"15e31586-1918-4aca-b3cc-eb2e2e6696d5\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.461965 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15e31586-1918-4aca-b3cc-eb2e2e6696d5-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "15e31586-1918-4aca-b3cc-eb2e2e6696d5" (UID: 
"15e31586-1918-4aca-b3cc-eb2e2e6696d5"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.462115 4840 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/15e31586-1918-4aca-b3cc-eb2e2e6696d5-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.466457 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15e31586-1918-4aca-b3cc-eb2e2e6696d5-logs" (OuterVolumeSpecName: "logs") pod "15e31586-1918-4aca-b3cc-eb2e2e6696d5" (UID: "15e31586-1918-4aca-b3cc-eb2e2e6696d5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.479593 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "15e31586-1918-4aca-b3cc-eb2e2e6696d5" (UID: "15e31586-1918-4aca-b3cc-eb2e2e6696d5"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.486052 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15e31586-1918-4aca-b3cc-eb2e2e6696d5-kube-api-access-k5fwz" (OuterVolumeSpecName: "kube-api-access-k5fwz") pod "15e31586-1918-4aca-b3cc-eb2e2e6696d5" (UID: "15e31586-1918-4aca-b3cc-eb2e2e6696d5"). InnerVolumeSpecName "kube-api-access-k5fwz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.496933 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15e31586-1918-4aca-b3cc-eb2e2e6696d5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "15e31586-1918-4aca-b3cc-eb2e2e6696d5" (UID: "15e31586-1918-4aca-b3cc-eb2e2e6696d5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.497003 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15e31586-1918-4aca-b3cc-eb2e2e6696d5-scripts" (OuterVolumeSpecName: "scripts") pod "15e31586-1918-4aca-b3cc-eb2e2e6696d5" (UID: "15e31586-1918-4aca-b3cc-eb2e2e6696d5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.512805 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15e31586-1918-4aca-b3cc-eb2e2e6696d5-config-data" (OuterVolumeSpecName: "config-data") pod "15e31586-1918-4aca-b3cc-eb2e2e6696d5" (UID: "15e31586-1918-4aca-b3cc-eb2e2e6696d5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.562856 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9f5fb305-4844-4628-a5d9-ed4115133a29-config-data\") pod \"9f5fb305-4844-4628-a5d9-ed4115133a29\" (UID: \"9f5fb305-4844-4628-a5d9-ed4115133a29\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.562916 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-config-data\") pod \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\" (UID: \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.562943 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/509c87f8-fee0-4a27-ad42-91629218a636-config\") pod \"509c87f8-fee0-4a27-ad42-91629218a636\" (UID: \"509c87f8-fee0-4a27-ad42-91629218a636\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.562963 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-ovsdbserver-nb\") pod \"9ead46a4-d789-4a22-9332-ed2c4f706010\" (UID: \"9ead46a4-d789-4a22-9332-ed2c4f706010\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.562990 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9f5fb305-4844-4628-a5d9-ed4115133a29-scripts\") pod \"9f5fb305-4844-4628-a5d9-ed4115133a29\" (UID: \"9f5fb305-4844-4628-a5d9-ed4115133a29\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.563059 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/509c87f8-fee0-4a27-ad42-91629218a636-combined-ca-bundle\") pod \"509c87f8-fee0-4a27-ad42-91629218a636\" (UID: \"509c87f8-fee0-4a27-ad42-91629218a636\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.563086 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-ovsdbserver-sb\") pod \"9ead46a4-d789-4a22-9332-ed2c4f706010\" (UID: \"9ead46a4-d789-4a22-9332-ed2c4f706010\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.563120 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-scripts\") pod \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\" (UID: \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.563145 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-dns-swift-storage-0\") pod \"9ead46a4-d789-4a22-9332-ed2c4f706010\" (UID: \"9ead46a4-d789-4a22-9332-ed2c4f706010\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.563169 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-dns-svc\") pod \"9ead46a4-d789-4a22-9332-ed2c4f706010\" (UID: \"9ead46a4-d789-4a22-9332-ed2c4f706010\") " Dec 05 15:18:14 crc 
kubenswrapper[4840]: I1205 15:18:14.563193 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qlw2t\" (UniqueName: \"kubernetes.io/projected/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-kube-api-access-qlw2t\") pod \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\" (UID: \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.563211 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nsd76\" (UniqueName: \"kubernetes.io/projected/509c87f8-fee0-4a27-ad42-91629218a636-kube-api-access-nsd76\") pod \"509c87f8-fee0-4a27-ad42-91629218a636\" (UID: \"509c87f8-fee0-4a27-ad42-91629218a636\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.563258 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r7s7n\" (UniqueName: \"kubernetes.io/projected/9f5fb305-4844-4628-a5d9-ed4115133a29-kube-api-access-r7s7n\") pod \"9f5fb305-4844-4628-a5d9-ed4115133a29\" (UID: \"9f5fb305-4844-4628-a5d9-ed4115133a29\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.563292 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-logs\") pod \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\" (UID: \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.563308 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wnbqz\" (UniqueName: \"kubernetes.io/projected/9ead46a4-d789-4a22-9332-ed2c4f706010-kube-api-access-wnbqz\") pod \"9ead46a4-d789-4a22-9332-ed2c4f706010\" (UID: \"9ead46a4-d789-4a22-9332-ed2c4f706010\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.563332 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-horizon-secret-key\") pod \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\" (UID: \"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.563396 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-config\") pod \"9ead46a4-d789-4a22-9332-ed2c4f706010\" (UID: \"9ead46a4-d789-4a22-9332-ed2c4f706010\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.563425 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9f5fb305-4844-4628-a5d9-ed4115133a29-horizon-secret-key\") pod \"9f5fb305-4844-4628-a5d9-ed4115133a29\" (UID: \"9f5fb305-4844-4628-a5d9-ed4115133a29\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.563493 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f5fb305-4844-4628-a5d9-ed4115133a29-logs\") pod \"9f5fb305-4844-4628-a5d9-ed4115133a29\" (UID: \"9f5fb305-4844-4628-a5d9-ed4115133a29\") " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.563537 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f5fb305-4844-4628-a5d9-ed4115133a29-config-data" (OuterVolumeSpecName: "config-data") pod "9f5fb305-4844-4628-a5d9-ed4115133a29" (UID: "9f5fb305-4844-4628-a5d9-ed4115133a29"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.563932 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5fwz\" (UniqueName: \"kubernetes.io/projected/15e31586-1918-4aca-b3cc-eb2e2e6696d5-kube-api-access-k5fwz\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.563952 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15e31586-1918-4aca-b3cc-eb2e2e6696d5-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.563972 4840 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.563981 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15e31586-1918-4aca-b3cc-eb2e2e6696d5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.563990 4840 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15e31586-1918-4aca-b3cc-eb2e2e6696d5-logs\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.563988 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f5fb305-4844-4628-a5d9-ed4115133a29-scripts" (OuterVolumeSpecName: "scripts") pod "9f5fb305-4844-4628-a5d9-ed4115133a29" (UID: "9f5fb305-4844-4628-a5d9-ed4115133a29"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.564001 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15e31586-1918-4aca-b3cc-eb2e2e6696d5-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.564042 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9f5fb305-4844-4628-a5d9-ed4115133a29-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.564834 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-config-data" (OuterVolumeSpecName: "config-data") pod "bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe" (UID: "bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.568515 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe" (UID: "bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.568613 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/509c87f8-fee0-4a27-ad42-91629218a636-kube-api-access-nsd76" (OuterVolumeSpecName: "kube-api-access-nsd76") pod "509c87f8-fee0-4a27-ad42-91629218a636" (UID: "509c87f8-fee0-4a27-ad42-91629218a636"). 
InnerVolumeSpecName "kube-api-access-nsd76". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.568637 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f5fb305-4844-4628-a5d9-ed4115133a29-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "9f5fb305-4844-4628-a5d9-ed4115133a29" (UID: "9f5fb305-4844-4628-a5d9-ed4115133a29"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.568934 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-scripts" (OuterVolumeSpecName: "scripts") pod "bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe" (UID: "bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.569626 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f5fb305-4844-4628-a5d9-ed4115133a29-logs" (OuterVolumeSpecName: "logs") pod "9f5fb305-4844-4628-a5d9-ed4115133a29" (UID: "9f5fb305-4844-4628-a5d9-ed4115133a29"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.569796 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-logs" (OuterVolumeSpecName: "logs") pod "bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe" (UID: "bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.572169 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ead46a4-d789-4a22-9332-ed2c4f706010-kube-api-access-wnbqz" (OuterVolumeSpecName: "kube-api-access-wnbqz") pod "9ead46a4-d789-4a22-9332-ed2c4f706010" (UID: "9ead46a4-d789-4a22-9332-ed2c4f706010"). InnerVolumeSpecName "kube-api-access-wnbqz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.572673 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f5fb305-4844-4628-a5d9-ed4115133a29-kube-api-access-r7s7n" (OuterVolumeSpecName: "kube-api-access-r7s7n") pod "9f5fb305-4844-4628-a5d9-ed4115133a29" (UID: "9f5fb305-4844-4628-a5d9-ed4115133a29"). InnerVolumeSpecName "kube-api-access-r7s7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.577729 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-kube-api-access-qlw2t" (OuterVolumeSpecName: "kube-api-access-qlw2t") pod "bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe" (UID: "bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe"). InnerVolumeSpecName "kube-api-access-qlw2t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.585897 4840 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.591951 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/509c87f8-fee0-4a27-ad42-91629218a636-config" (OuterVolumeSpecName: "config") pod "509c87f8-fee0-4a27-ad42-91629218a636" (UID: "509c87f8-fee0-4a27-ad42-91629218a636"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.593000 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/509c87f8-fee0-4a27-ad42-91629218a636-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "509c87f8-fee0-4a27-ad42-91629218a636" (UID: "509c87f8-fee0-4a27-ad42-91629218a636"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.610208 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9ead46a4-d789-4a22-9332-ed2c4f706010" (UID: "9ead46a4-d789-4a22-9332-ed2c4f706010"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.610909 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9ead46a4-d789-4a22-9332-ed2c4f706010" (UID: "9ead46a4-d789-4a22-9332-ed2c4f706010"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.614255 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-config" (OuterVolumeSpecName: "config") pod "9ead46a4-d789-4a22-9332-ed2c4f706010" (UID: "9ead46a4-d789-4a22-9332-ed2c4f706010"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.616352 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9ead46a4-d789-4a22-9332-ed2c4f706010" (UID: "9ead46a4-d789-4a22-9332-ed2c4f706010"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.620121 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9ead46a4-d789-4a22-9332-ed2c4f706010" (UID: "9ead46a4-d789-4a22-9332-ed2c4f706010"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.665292 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.665328 4840 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.665342 4840 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.665350 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qlw2t\" (UniqueName: \"kubernetes.io/projected/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-kube-api-access-qlw2t\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.665359 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nsd76\" (UniqueName: \"kubernetes.io/projected/509c87f8-fee0-4a27-ad42-91629218a636-kube-api-access-nsd76\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.665369 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r7s7n\" (UniqueName: \"kubernetes.io/projected/9f5fb305-4844-4628-a5d9-ed4115133a29-kube-api-access-r7s7n\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.665378 4840 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.665386 4840 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-logs\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.665395 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wnbqz\" (UniqueName: \"kubernetes.io/projected/9ead46a4-d789-4a22-9332-ed2c4f706010-kube-api-access-wnbqz\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.665402 4840 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.665412 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-config\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.665420 4840 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9f5fb305-4844-4628-a5d9-ed4115133a29-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.665427 4840 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f5fb305-4844-4628-a5d9-ed4115133a29-logs\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 
15:18:14.665459 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.665470 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/509c87f8-fee0-4a27-ad42-91629218a636-config\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.665478 4840 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.665486 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9f5fb305-4844-4628-a5d9-ed4115133a29-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.665503 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/509c87f8-fee0-4a27-ad42-91629218a636-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.665514 4840 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9ead46a4-d789-4a22-9332-ed2c4f706010-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.731938 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7b99464548-lx7k9"] Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.842788 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4n4lf" event={"ID":"509c87f8-fee0-4a27-ad42-91629218a636","Type":"ContainerDied","Data":"5a58954597eafc012f677075b09d3db83d4921e085f0b7c9f561b4acca9dbdc2"} Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.842835 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a58954597eafc012f677075b09d3db83d4921e085f0b7c9f561b4acca9dbdc2" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.842903 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4n4lf" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.854344 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"15e31586-1918-4aca-b3cc-eb2e2e6696d5","Type":"ContainerDied","Data":"bea34ae1bd9cdce9f9b5d3488c14aa9264ed00ffc67f21bb6a3889e328d1fd52"} Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.854406 4840 scope.go:117] "RemoveContainer" containerID="619117a4d30ec0de772a2813f293735076515512351f1d69c8cfca5655056a1d" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.854585 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.858852 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" event={"ID":"9ead46a4-d789-4a22-9332-ed2c4f706010","Type":"ContainerDied","Data":"ad23933e2087ad98dbb272819deb866d3aa69a2ff7457c9e5e5c4b6983bf276d"} Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.858902 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.863087 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-755cc8bd6f-hrt72" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.863078 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-755cc8bd6f-hrt72" event={"ID":"bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe","Type":"ContainerDied","Data":"7feb31533728811b38c81fe181c214d79f5e6506e66bb635b7e2f671b4968f29"} Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.865612 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7bd49c98bf-w2wph" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.865620 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7bd49c98bf-w2wph" event={"ID":"9f5fb305-4844-4628-a5d9-ed4115133a29","Type":"ContainerDied","Data":"ba9952dd0f91cad05b5a6b439d028d5dc6657851e6f755ec670ce783fc702ed1"} Dec 05 15:18:14 crc kubenswrapper[4840]: E1205 15:18:14.867091 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-szwv8" podUID="ae68e2b9-f51d-4486-952d-73c097fbaac4" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.916274 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-5n2bl"] Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.923365 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-5n2bl"] Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.934072 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 15:18:14 crc kubenswrapper[4840]: E1205 15:18:14.946692 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Dec 05 15:18:14 crc kubenswrapper[4840]: E1205 15:18:14.946924 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-f878p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-5ccg9_openstack(5731646f-d8c6-4bfd-b815-3d68c244d801): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 15:18:14 crc kubenswrapper[4840]: E1205 15:18:14.948071 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-5ccg9" podUID="5731646f-d8c6-4bfd-b815-3d68c244d801" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.960921 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.971255 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.972904 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 15:18:14 crc kubenswrapper[4840]: E1205 15:18:14.973290 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15e31586-1918-4aca-b3cc-eb2e2e6696d5" containerName="glance-httpd" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.973307 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="15e31586-1918-4aca-b3cc-eb2e2e6696d5" containerName="glance-httpd" Dec 05 15:18:14 crc kubenswrapper[4840]: E1205 15:18:14.973335 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15e31586-1918-4aca-b3cc-eb2e2e6696d5" containerName="glance-log" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.973341 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="15e31586-1918-4aca-b3cc-eb2e2e6696d5" containerName="glance-log" Dec 05 15:18:14 crc kubenswrapper[4840]: E1205 15:18:14.973352 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6480440-9c3d-4541-b5f1-baaeb1e4d6f6" containerName="glance-httpd" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.973358 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6480440-9c3d-4541-b5f1-baaeb1e4d6f6" containerName="glance-httpd" Dec 05 15:18:14 crc kubenswrapper[4840]: E1205 15:18:14.973375 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="509c87f8-fee0-4a27-ad42-91629218a636" containerName="neutron-db-sync" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.973385 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="509c87f8-fee0-4a27-ad42-91629218a636" containerName="neutron-db-sync" Dec 05 15:18:14 crc kubenswrapper[4840]: E1205 15:18:14.973394 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ead46a4-d789-4a22-9332-ed2c4f706010" containerName="dnsmasq-dns" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.973402 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ead46a4-d789-4a22-9332-ed2c4f706010" containerName="dnsmasq-dns" Dec 05 15:18:14 crc kubenswrapper[4840]: E1205 15:18:14.973416 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ead46a4-d789-4a22-9332-ed2c4f706010" containerName="init" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.973423 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ead46a4-d789-4a22-9332-ed2c4f706010" containerName="init" Dec 05 15:18:14 crc kubenswrapper[4840]: E1205 15:18:14.973437 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6480440-9c3d-4541-b5f1-baaeb1e4d6f6" containerName="glance-log" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.973445 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6480440-9c3d-4541-b5f1-baaeb1e4d6f6" containerName="glance-log" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.973628 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="509c87f8-fee0-4a27-ad42-91629218a636" containerName="neutron-db-sync" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.973647 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6480440-9c3d-4541-b5f1-baaeb1e4d6f6" containerName="glance-log" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.973656 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6480440-9c3d-4541-b5f1-baaeb1e4d6f6" containerName="glance-httpd" Dec 
05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.973667 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="15e31586-1918-4aca-b3cc-eb2e2e6696d5" containerName="glance-log" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.973678 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="15e31586-1918-4aca-b3cc-eb2e2e6696d5" containerName="glance-httpd" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.973691 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ead46a4-d789-4a22-9332-ed2c4f706010" containerName="dnsmasq-dns" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.976225 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.981977 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.982132 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 15:18:14 crc kubenswrapper[4840]: I1205 15:18:14.987334 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7ff68744f9-zt2wd" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:14.999924 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-755cc8bd6f-hrt72"] Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.012118 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.021269 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-755cc8bd6f-hrt72"] Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.036540 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7bd49c98bf-w2wph"] Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.041551 4840 scope.go:117] "RemoveContainer" containerID="56bbb72ebdbd7200e23113548fe03f37091594b9e8a497428b7f84a2f03afec4" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.053233 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7bd49c98bf-w2wph"] Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.070687 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fcd25084-75e8-447b-ba4a-2daa682a9cdb-horizon-secret-key\") pod \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\" (UID: \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\") " Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.070745 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-scripts\") pod \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.070777 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q9bv9\" (UniqueName: \"kubernetes.io/projected/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-kube-api-access-q9bv9\") pod \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.070898 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage03-crc\") pod \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.070949 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-config-data\") pod \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.070972 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fcd25084-75e8-447b-ba4a-2daa682a9cdb-scripts\") pod \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\" (UID: \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\") " Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.071033 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-logs\") pod \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.071113 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-httpd-run\") pod \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.071133 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pr2g\" (UniqueName: \"kubernetes.io/projected/fcd25084-75e8-447b-ba4a-2daa682a9cdb-kube-api-access-5pr2g\") pod \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\" (UID: \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\") " Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.071154 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-combined-ca-bundle\") pod \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\" (UID: \"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6\") " Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.071175 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcd25084-75e8-447b-ba4a-2daa682a9cdb-logs\") pod \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\" (UID: \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\") " Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.071195 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fcd25084-75e8-447b-ba4a-2daa682a9cdb-config-data\") pod \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\" (UID: \"fcd25084-75e8-447b-ba4a-2daa682a9cdb\") " Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.071378 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ff6zr\" (UniqueName: \"kubernetes.io/projected/3c46fc46-e681-4b04-8367-3a9f6e0283a6-kube-api-access-ff6zr\") pod \"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.071412 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/3c46fc46-e681-4b04-8367-3a9f6e0283a6-logs\") pod \"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.071434 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3c46fc46-e681-4b04-8367-3a9f6e0283a6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.071465 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c46fc46-e681-4b04-8367-3a9f6e0283a6-config-data\") pod \"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.071481 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c46fc46-e681-4b04-8367-3a9f6e0283a6-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.071498 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.071518 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c46fc46-e681-4b04-8367-3a9f6e0283a6-scripts\") pod \"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.071592 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c46fc46-e681-4b04-8367-3a9f6e0283a6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.077914 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcd25084-75e8-447b-ba4a-2daa682a9cdb-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "fcd25084-75e8-447b-ba4a-2daa682a9cdb" (UID: "fcd25084-75e8-447b-ba4a-2daa682a9cdb"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.082024 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-logs" (OuterVolumeSpecName: "logs") pod "c6480440-9c3d-4541-b5f1-baaeb1e4d6f6" (UID: "c6480440-9c3d-4541-b5f1-baaeb1e4d6f6"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.082267 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c6480440-9c3d-4541-b5f1-baaeb1e4d6f6" (UID: "c6480440-9c3d-4541-b5f1-baaeb1e4d6f6"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.082523 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcd25084-75e8-447b-ba4a-2daa682a9cdb-scripts" (OuterVolumeSpecName: "scripts") pod "fcd25084-75e8-447b-ba4a-2daa682a9cdb" (UID: "fcd25084-75e8-447b-ba4a-2daa682a9cdb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.082580 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcd25084-75e8-447b-ba4a-2daa682a9cdb-config-data" (OuterVolumeSpecName: "config-data") pod "fcd25084-75e8-447b-ba4a-2daa682a9cdb" (UID: "fcd25084-75e8-447b-ba4a-2daa682a9cdb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.085042 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcd25084-75e8-447b-ba4a-2daa682a9cdb-kube-api-access-5pr2g" (OuterVolumeSpecName: "kube-api-access-5pr2g") pod "fcd25084-75e8-447b-ba4a-2daa682a9cdb" (UID: "fcd25084-75e8-447b-ba4a-2daa682a9cdb"). InnerVolumeSpecName "kube-api-access-5pr2g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.086405 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-scripts" (OuterVolumeSpecName: "scripts") pod "c6480440-9c3d-4541-b5f1-baaeb1e4d6f6" (UID: "c6480440-9c3d-4541-b5f1-baaeb1e4d6f6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.087199 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "c6480440-9c3d-4541-b5f1-baaeb1e4d6f6" (UID: "c6480440-9c3d-4541-b5f1-baaeb1e4d6f6"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.088054 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcd25084-75e8-447b-ba4a-2daa682a9cdb-logs" (OuterVolumeSpecName: "logs") pod "fcd25084-75e8-447b-ba4a-2daa682a9cdb" (UID: "fcd25084-75e8-447b-ba4a-2daa682a9cdb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.090282 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-kube-api-access-q9bv9" (OuterVolumeSpecName: "kube-api-access-q9bv9") pod "c6480440-9c3d-4541-b5f1-baaeb1e4d6f6" (UID: "c6480440-9c3d-4541-b5f1-baaeb1e4d6f6"). InnerVolumeSpecName "kube-api-access-q9bv9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.109829 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c6480440-9c3d-4541-b5f1-baaeb1e4d6f6" (UID: "c6480440-9c3d-4541-b5f1-baaeb1e4d6f6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.169658 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-config-data" (OuterVolumeSpecName: "config-data") pod "c6480440-9c3d-4541-b5f1-baaeb1e4d6f6" (UID: "c6480440-9c3d-4541-b5f1-baaeb1e4d6f6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.173064 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ff6zr\" (UniqueName: \"kubernetes.io/projected/3c46fc46-e681-4b04-8367-3a9f6e0283a6-kube-api-access-ff6zr\") pod \"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.173122 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c46fc46-e681-4b04-8367-3a9f6e0283a6-logs\") pod \"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.173148 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3c46fc46-e681-4b04-8367-3a9f6e0283a6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.173177 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c46fc46-e681-4b04-8367-3a9f6e0283a6-config-data\") pod \"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.173193 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c46fc46-e681-4b04-8367-3a9f6e0283a6-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.173208 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.173226 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c46fc46-e681-4b04-8367-3a9f6e0283a6-scripts\") pod \"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") 
" pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.173315 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c46fc46-e681-4b04-8367-3a9f6e0283a6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.173366 4840 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fcd25084-75e8-447b-ba4a-2daa682a9cdb-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.173376 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.173385 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q9bv9\" (UniqueName: \"kubernetes.io/projected/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-kube-api-access-q9bv9\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.173408 4840 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.173417 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.173425 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fcd25084-75e8-447b-ba4a-2daa682a9cdb-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.173448 4840 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-logs\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.173464 4840 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.173500 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pr2g\" (UniqueName: \"kubernetes.io/projected/fcd25084-75e8-447b-ba4a-2daa682a9cdb-kube-api-access-5pr2g\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.173509 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.173517 4840 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcd25084-75e8-447b-ba4a-2daa682a9cdb-logs\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.173525 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fcd25084-75e8-447b-ba4a-2daa682a9cdb-config-data\") 
on node \"crc\" DevicePath \"\"" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.174520 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c46fc46-e681-4b04-8367-3a9f6e0283a6-logs\") pod \"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.174727 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3c46fc46-e681-4b04-8367-3a9f6e0283a6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.179069 4840 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.183390 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c46fc46-e681-4b04-8367-3a9f6e0283a6-scripts\") pod \"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.183509 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c46fc46-e681-4b04-8367-3a9f6e0283a6-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.191985 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c46fc46-e681-4b04-8367-3a9f6e0283a6-config-data\") pod \"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.192708 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c46fc46-e681-4b04-8367-3a9f6e0283a6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.213912 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ff6zr\" (UniqueName: \"kubernetes.io/projected/3c46fc46-e681-4b04-8367-3a9f6e0283a6-kube-api-access-ff6zr\") pod \"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.223414 4840 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.255793 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod 
\"glance-default-external-api-0\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.274915 4840 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.303909 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.534500 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-67c65cc6bd-s49k5"] Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.595894 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-4bjv4"] Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.616964 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-rl6pm"] Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.618493 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.632198 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-rl6pm"] Dec 05 15:18:15 crc kubenswrapper[4840]: W1205 15:18:15.678975 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1c43fa4f_ab9c_4ffb_8945_b7d8ac52a705.slice/crio-9ae9aa166f48428c079cee1263ce1644ce6aa6b128175a999f9c53aaf01f4813 WatchSource:0}: Error finding container 9ae9aa166f48428c079cee1263ce1644ce6aa6b128175a999f9c53aaf01f4813: Status 404 returned error can't find the container with id 9ae9aa166f48428c079cee1263ce1644ce6aa6b128175a999f9c53aaf01f4813 Dec 05 15:18:15 crc kubenswrapper[4840]: W1205 15:18:15.692038 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod15d9aa0d_7948_40b8_a043_e6d36f2a5bb1.slice/crio-11b305fc9905553ba62ea67f93ee1484bc661879ec03c79e6a74b22159ffd04d WatchSource:0}: Error finding container 11b305fc9905553ba62ea67f93ee1484bc661879ec03c79e6a74b22159ffd04d: Status 404 returned error can't find the container with id 11b305fc9905553ba62ea67f93ee1484bc661879ec03c79e6a74b22159ffd04d Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.757905 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-65db99bfb4-5mjgn"] Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.759631 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-65db99bfb4-5mjgn" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.764849 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.765127 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-2fq2v" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.765318 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.765507 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.769391 4840 scope.go:117] "RemoveContainer" containerID="1ce06db264cce1f0192fc6f568105da48de1b8a6728582327e6c7e9d6134f2be" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.774268 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-65db99bfb4-5mjgn"] Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.786463 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-dns-swift-storage-0\") pod \"dnsmasq-dns-5ccc5c4795-rl6pm\" (UID: \"e7360138-33f0-4582-86d8-4eced23bb05b\") " pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.786560 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccc5c4795-rl6pm\" (UID: \"e7360138-33f0-4582-86d8-4eced23bb05b\") " pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.786621 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlqff\" (UniqueName: \"kubernetes.io/projected/e7360138-33f0-4582-86d8-4eced23bb05b-kube-api-access-jlqff\") pod \"dnsmasq-dns-5ccc5c4795-rl6pm\" (UID: \"e7360138-33f0-4582-86d8-4eced23bb05b\") " pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.786660 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-dns-svc\") pod \"dnsmasq-dns-5ccc5c4795-rl6pm\" (UID: \"e7360138-33f0-4582-86d8-4eced23bb05b\") " pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.786693 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-config\") pod \"dnsmasq-dns-5ccc5c4795-rl6pm\" (UID: \"e7360138-33f0-4582-86d8-4eced23bb05b\") " pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.786778 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccc5c4795-rl6pm\" (UID: \"e7360138-33f0-4582-86d8-4eced23bb05b\") " pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" Dec 05 15:18:15 crc kubenswrapper[4840]: 
I1205 15:18:15.832366 4840 scope.go:117] "RemoveContainer" containerID="fa67b30a2a132c2e14a6d351bd18491f383d29afd2d680144173f660fd021649" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.885510 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7ff68744f9-zt2wd" event={"ID":"fcd25084-75e8-447b-ba4a-2daa682a9cdb","Type":"ContainerDied","Data":"8e9a99a48f86a8c1d18bea7341a9f8f479ac8f609b66948a75dac9b3d48a2b60"} Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.885537 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7ff68744f9-zt2wd" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.891151 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlqff\" (UniqueName: \"kubernetes.io/projected/e7360138-33f0-4582-86d8-4eced23bb05b-kube-api-access-jlqff\") pod \"dnsmasq-dns-5ccc5c4795-rl6pm\" (UID: \"e7360138-33f0-4582-86d8-4eced23bb05b\") " pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.891207 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhgts\" (UniqueName: \"kubernetes.io/projected/15fbee23-d910-410f-ae76-91c967143dca-kube-api-access-lhgts\") pod \"neutron-65db99bfb4-5mjgn\" (UID: \"15fbee23-d910-410f-ae76-91c967143dca\") " pod="openstack/neutron-65db99bfb4-5mjgn" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.891235 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-dns-svc\") pod \"dnsmasq-dns-5ccc5c4795-rl6pm\" (UID: \"e7360138-33f0-4582-86d8-4eced23bb05b\") " pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.891257 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-config\") pod \"dnsmasq-dns-5ccc5c4795-rl6pm\" (UID: \"e7360138-33f0-4582-86d8-4eced23bb05b\") " pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.891264 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b99464548-lx7k9" event={"ID":"d384faa1-3b3b-45f8-bf4b-902236ec40da","Type":"ContainerStarted","Data":"0de8e2927a45287b3fee6d846a69eaffe597747b90b95fddbfd6ce18c072f979"} Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.891279 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/15fbee23-d910-410f-ae76-91c967143dca-ovndb-tls-certs\") pod \"neutron-65db99bfb4-5mjgn\" (UID: \"15fbee23-d910-410f-ae76-91c967143dca\") " pod="openstack/neutron-65db99bfb4-5mjgn" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.893621 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccc5c4795-rl6pm\" (UID: \"e7360138-33f0-4582-86d8-4eced23bb05b\") " pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.893759 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/15fbee23-d910-410f-ae76-91c967143dca-config\") 
pod \"neutron-65db99bfb4-5mjgn\" (UID: \"15fbee23-d910-410f-ae76-91c967143dca\") " pod="openstack/neutron-65db99bfb4-5mjgn" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.893804 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-dns-swift-storage-0\") pod \"dnsmasq-dns-5ccc5c4795-rl6pm\" (UID: \"e7360138-33f0-4582-86d8-4eced23bb05b\") " pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.893951 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-config\") pod \"dnsmasq-dns-5ccc5c4795-rl6pm\" (UID: \"e7360138-33f0-4582-86d8-4eced23bb05b\") " pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.893975 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15fbee23-d910-410f-ae76-91c967143dca-combined-ca-bundle\") pod \"neutron-65db99bfb4-5mjgn\" (UID: \"15fbee23-d910-410f-ae76-91c967143dca\") " pod="openstack/neutron-65db99bfb4-5mjgn" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.894217 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/15fbee23-d910-410f-ae76-91c967143dca-httpd-config\") pod \"neutron-65db99bfb4-5mjgn\" (UID: \"15fbee23-d910-410f-ae76-91c967143dca\") " pod="openstack/neutron-65db99bfb4-5mjgn" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.894353 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccc5c4795-rl6pm\" (UID: \"e7360138-33f0-4582-86d8-4eced23bb05b\") " pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.895464 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-dns-svc\") pod \"dnsmasq-dns-5ccc5c4795-rl6pm\" (UID: \"e7360138-33f0-4582-86d8-4eced23bb05b\") " pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.895971 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccc5c4795-rl6pm\" (UID: \"e7360138-33f0-4582-86d8-4eced23bb05b\") " pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.896286 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-dns-swift-storage-0\") pod \"dnsmasq-dns-5ccc5c4795-rl6pm\" (UID: \"e7360138-33f0-4582-86d8-4eced23bb05b\") " pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.898424 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccc5c4795-rl6pm\" (UID: \"e7360138-33f0-4582-86d8-4eced23bb05b\") " 
pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.906538 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-4bjv4" event={"ID":"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705","Type":"ContainerStarted","Data":"9ae9aa166f48428c079cee1263ce1644ce6aa6b128175a999f9c53aaf01f4813"} Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.914428 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlqff\" (UniqueName: \"kubernetes.io/projected/e7360138-33f0-4582-86d8-4eced23bb05b-kube-api-access-jlqff\") pod \"dnsmasq-dns-5ccc5c4795-rl6pm\" (UID: \"e7360138-33f0-4582-86d8-4eced23bb05b\") " pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.915779 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"c6480440-9c3d-4541-b5f1-baaeb1e4d6f6","Type":"ContainerDied","Data":"4cc11f691f77d7deab7b576a674cd1c617fb8afff29f93977369b021065a51b4"} Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.915825 4840 scope.go:117] "RemoveContainer" containerID="16a4ed2f0d915b6604fb95d8188ec9107b859bf66ea7c94959c3d34ee0f6cf61" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.915957 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.932918 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-67c65cc6bd-s49k5" event={"ID":"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1","Type":"ContainerStarted","Data":"11b305fc9905553ba62ea67f93ee1484bc661879ec03c79e6a74b22159ffd04d"} Dec 05 15:18:15 crc kubenswrapper[4840]: E1205 15:18:15.941751 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-5ccg9" podUID="5731646f-d8c6-4bfd-b815-3d68c244d801" Dec 05 15:18:15 crc kubenswrapper[4840]: I1205 15:18:15.942632 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:15.999816 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7ff68744f9-zt2wd"] Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.000096 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhgts\" (UniqueName: \"kubernetes.io/projected/15fbee23-d910-410f-ae76-91c967143dca-kube-api-access-lhgts\") pod \"neutron-65db99bfb4-5mjgn\" (UID: \"15fbee23-d910-410f-ae76-91c967143dca\") " pod="openstack/neutron-65db99bfb4-5mjgn" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.000256 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/15fbee23-d910-410f-ae76-91c967143dca-ovndb-tls-certs\") pod \"neutron-65db99bfb4-5mjgn\" (UID: \"15fbee23-d910-410f-ae76-91c967143dca\") " pod="openstack/neutron-65db99bfb4-5mjgn" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.000368 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/15fbee23-d910-410f-ae76-91c967143dca-config\") pod \"neutron-65db99bfb4-5mjgn\" (UID: \"15fbee23-d910-410f-ae76-91c967143dca\") " pod="openstack/neutron-65db99bfb4-5mjgn" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.000428 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15fbee23-d910-410f-ae76-91c967143dca-combined-ca-bundle\") pod \"neutron-65db99bfb4-5mjgn\" (UID: \"15fbee23-d910-410f-ae76-91c967143dca\") " pod="openstack/neutron-65db99bfb4-5mjgn" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.000453 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/15fbee23-d910-410f-ae76-91c967143dca-httpd-config\") pod \"neutron-65db99bfb4-5mjgn\" (UID: \"15fbee23-d910-410f-ae76-91c967143dca\") " pod="openstack/neutron-65db99bfb4-5mjgn" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.006259 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/15fbee23-d910-410f-ae76-91c967143dca-config\") pod \"neutron-65db99bfb4-5mjgn\" (UID: \"15fbee23-d910-410f-ae76-91c967143dca\") " pod="openstack/neutron-65db99bfb4-5mjgn" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.013040 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/15fbee23-d910-410f-ae76-91c967143dca-httpd-config\") pod \"neutron-65db99bfb4-5mjgn\" (UID: \"15fbee23-d910-410f-ae76-91c967143dca\") " pod="openstack/neutron-65db99bfb4-5mjgn" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.033496 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/15fbee23-d910-410f-ae76-91c967143dca-ovndb-tls-certs\") pod \"neutron-65db99bfb4-5mjgn\" (UID: \"15fbee23-d910-410f-ae76-91c967143dca\") " pod="openstack/neutron-65db99bfb4-5mjgn" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.034229 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7ff68744f9-zt2wd"] Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.034947 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhgts\" (UniqueName: 
\"kubernetes.io/projected/15fbee23-d910-410f-ae76-91c967143dca-kube-api-access-lhgts\") pod \"neutron-65db99bfb4-5mjgn\" (UID: \"15fbee23-d910-410f-ae76-91c967143dca\") " pod="openstack/neutron-65db99bfb4-5mjgn" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.040002 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15fbee23-d910-410f-ae76-91c967143dca-combined-ca-bundle\") pod \"neutron-65db99bfb4-5mjgn\" (UID: \"15fbee23-d910-410f-ae76-91c967143dca\") " pod="openstack/neutron-65db99bfb4-5mjgn" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.054018 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.058414 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.092608 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-65db99bfb4-5mjgn" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.105055 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15e31586-1918-4aca-b3cc-eb2e2e6696d5" path="/var/lib/kubelet/pods/15e31586-1918-4aca-b3cc-eb2e2e6696d5/volumes" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.111262 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ead46a4-d789-4a22-9332-ed2c4f706010" path="/var/lib/kubelet/pods/9ead46a4-d789-4a22-9332-ed2c4f706010/volumes" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.111961 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f5fb305-4844-4628-a5d9-ed4115133a29" path="/var/lib/kubelet/pods/9f5fb305-4844-4628-a5d9-ed4115133a29/volumes" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.114485 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe" path="/var/lib/kubelet/pods/bdad4f80-e6c9-4c6e-bd7b-b99aaf0bc3fe/volumes" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.115223 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6480440-9c3d-4541-b5f1-baaeb1e4d6f6" path="/var/lib/kubelet/pods/c6480440-9c3d-4541-b5f1-baaeb1e4d6f6/volumes" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.117004 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcd25084-75e8-447b-ba4a-2daa682a9cdb" path="/var/lib/kubelet/pods/fcd25084-75e8-447b-ba4a-2daa682a9cdb/volumes" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.117370 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.118704 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.118788 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.127376 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.125859 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.179081 4840 scope.go:117] "RemoveContainer" containerID="1aea1aa0b08d267942f211593146a9caedafc50a42884b731963033ed9a43f45" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.203266 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/231dd866-8fc7-4d81-a391-8cfc74561bd6-logs\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.203313 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/231dd866-8fc7-4d81-a391-8cfc74561bd6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.203361 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/231dd866-8fc7-4d81-a391-8cfc74561bd6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.203389 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/231dd866-8fc7-4d81-a391-8cfc74561bd6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.203474 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.203520 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rjf2\" (UniqueName: \"kubernetes.io/projected/231dd866-8fc7-4d81-a391-8cfc74561bd6-kube-api-access-8rjf2\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.203548 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/231dd866-8fc7-4d81-a391-8cfc74561bd6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.203567 4840 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/231dd866-8fc7-4d81-a391-8cfc74561bd6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.304553 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/231dd866-8fc7-4d81-a391-8cfc74561bd6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.304923 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.304971 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rjf2\" (UniqueName: \"kubernetes.io/projected/231dd866-8fc7-4d81-a391-8cfc74561bd6-kube-api-access-8rjf2\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.304992 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/231dd866-8fc7-4d81-a391-8cfc74561bd6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.305013 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/231dd866-8fc7-4d81-a391-8cfc74561bd6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.305036 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/231dd866-8fc7-4d81-a391-8cfc74561bd6-logs\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.305057 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/231dd866-8fc7-4d81-a391-8cfc74561bd6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.305099 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/231dd866-8fc7-4d81-a391-8cfc74561bd6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.305895 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/231dd866-8fc7-4d81-a391-8cfc74561bd6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.306242 4840 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.306924 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/231dd866-8fc7-4d81-a391-8cfc74561bd6-logs\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.312273 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/231dd866-8fc7-4d81-a391-8cfc74561bd6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.313766 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/231dd866-8fc7-4d81-a391-8cfc74561bd6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.315070 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/231dd866-8fc7-4d81-a391-8cfc74561bd6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.315393 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/231dd866-8fc7-4d81-a391-8cfc74561bd6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.325482 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rjf2\" (UniqueName: \"kubernetes.io/projected/231dd866-8fc7-4d81-a391-8cfc74561bd6-kube-api-access-8rjf2\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.363817 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.465204 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 15:18:16 crc kubenswrapper[4840]: W1205 15:18:16.481413 4840 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c46fc46_e681_4b04_8367_3a9f6e0283a6.slice/crio-9783f868f7a9fc5db95e6ed77f4f78beb6e992bdfe4576f05db7cb88995bafa6 WatchSource:0}: Error finding container 9783f868f7a9fc5db95e6ed77f4f78beb6e992bdfe4576f05db7cb88995bafa6: Status 404 returned error can't find the container with id 9783f868f7a9fc5db95e6ed77f4f78beb6e992bdfe4576f05db7cb88995bafa6 Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.633916 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-rl6pm"] Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.658055 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.736858 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6d5b6d6b67-5n2bl" podUID="9ead46a4-d789-4a22-9332-ed2c4f706010" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.127:5353: i/o timeout" Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.966874 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-65db99bfb4-5mjgn"] Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.969426 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" event={"ID":"e7360138-33f0-4582-86d8-4eced23bb05b","Type":"ContainerStarted","Data":"d6cfe33c066236e015cad0adf77c3226649ea187926b47451a7d2601eb6dfec8"} Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.971944 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-67c65cc6bd-s49k5" event={"ID":"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1","Type":"ContainerStarted","Data":"0f4137ff5a4365825eaf588ce2e0ecffcd8c5c44aa14dc9661428fbcf297af65"} Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.978612 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-dfgrm" event={"ID":"dab20e36-d38f-4c5f-9d42-028c9df5ca51","Type":"ContainerStarted","Data":"53024aebaef310348e49018d50f7757dfd8bc4eee143e1f7bdc94038d4eacdac"} Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.981167 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b99464548-lx7k9" event={"ID":"d384faa1-3b3b-45f8-bf4b-902236ec40da","Type":"ContainerStarted","Data":"5762a1e071db4d50f60bd5b26122b9fd0d9917bb3f2e933982c5c9a7ad723519"} Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.989896 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a351148-31b1-402a-a8f5-0f26f81fddef","Type":"ContainerStarted","Data":"959c31b1df96cbf3954e3394e2e3259bbf0a197a97f5e49d032d347c25242994"} Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.994672 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-4bjv4" event={"ID":"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705","Type":"ContainerStarted","Data":"48529d1c7e0e70882af343ccb984e6ec4a3d9734cd0bcbdc9f334b455947d1a7"} Dec 05 15:18:16 crc kubenswrapper[4840]: I1205 15:18:16.998935 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-dfgrm" podStartSLOduration=4.972287163 podStartE2EDuration="36.998921794s" podCreationTimestamp="2025-12-05 15:17:40 +0000 UTC" firstStartedPulling="2025-12-05 15:17:42.22854275 +0000 UTC m=+1140.569605364" lastFinishedPulling="2025-12-05 15:18:14.255177381 +0000 UTC m=+1172.596239995" 
observedRunningTime="2025-12-05 15:18:16.995744754 +0000 UTC m=+1175.336807368" watchObservedRunningTime="2025-12-05 15:18:16.998921794 +0000 UTC m=+1175.339984408" Dec 05 15:18:17 crc kubenswrapper[4840]: I1205 15:18:17.010495 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3c46fc46-e681-4b04-8367-3a9f6e0283a6","Type":"ContainerStarted","Data":"9783f868f7a9fc5db95e6ed77f4f78beb6e992bdfe4576f05db7cb88995bafa6"} Dec 05 15:18:17 crc kubenswrapper[4840]: I1205 15:18:17.017469 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-4bjv4" podStartSLOduration=4.017451009 podStartE2EDuration="4.017451009s" podCreationTimestamp="2025-12-05 15:18:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:18:17.015700249 +0000 UTC m=+1175.356762853" watchObservedRunningTime="2025-12-05 15:18:17.017451009 +0000 UTC m=+1175.358513623" Dec 05 15:18:17 crc kubenswrapper[4840]: I1205 15:18:17.416344 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 15:18:18 crc kubenswrapper[4840]: I1205 15:18:18.104907 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"231dd866-8fc7-4d81-a391-8cfc74561bd6","Type":"ContainerStarted","Data":"c5c0520f4be89bba2d8927849e9c3c69fce59a386350bb5a92ce37907aa8dcd9"} Dec 05 15:18:18 crc kubenswrapper[4840]: I1205 15:18:18.113299 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3c46fc46-e681-4b04-8367-3a9f6e0283a6","Type":"ContainerStarted","Data":"7ce3d9c4d79a99efd6c6f4dd67e5809882b393c8c67152d5af4ff938c2b706bd"} Dec 05 15:18:18 crc kubenswrapper[4840]: I1205 15:18:18.117003 4840 generic.go:334] "Generic (PLEG): container finished" podID="e7360138-33f0-4582-86d8-4eced23bb05b" containerID="b97bd6ff2f33bebebb36f2a3a0029ee2722d4522e2500c7f7f4de263dbb526a4" exitCode=0 Dec 05 15:18:18 crc kubenswrapper[4840]: I1205 15:18:18.117143 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" event={"ID":"e7360138-33f0-4582-86d8-4eced23bb05b","Type":"ContainerDied","Data":"b97bd6ff2f33bebebb36f2a3a0029ee2722d4522e2500c7f7f4de263dbb526a4"} Dec 05 15:18:18 crc kubenswrapper[4840]: I1205 15:18:18.157276 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-67c65cc6bd-s49k5" event={"ID":"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1","Type":"ContainerStarted","Data":"d4456ed432fab1713012466f282b0a590b939e801f0b909aaed8aca97366bc96"} Dec 05 15:18:18 crc kubenswrapper[4840]: I1205 15:18:18.186099 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b99464548-lx7k9" event={"ID":"d384faa1-3b3b-45f8-bf4b-902236ec40da","Type":"ContainerStarted","Data":"9059eed07c1a8578e26e578cb7093efda39ca4367a283ba430b484c4c1805f6c"} Dec 05 15:18:18 crc kubenswrapper[4840]: I1205 15:18:18.193704 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-67c65cc6bd-s49k5" podStartSLOduration=26.647286831 podStartE2EDuration="27.193686081s" podCreationTimestamp="2025-12-05 15:17:51 +0000 UTC" firstStartedPulling="2025-12-05 15:18:15.701589383 +0000 UTC m=+1174.042651997" lastFinishedPulling="2025-12-05 15:18:16.247988633 +0000 UTC m=+1174.589051247" observedRunningTime="2025-12-05 15:18:18.187243889 +0000 UTC 
m=+1176.528306513" watchObservedRunningTime="2025-12-05 15:18:18.193686081 +0000 UTC m=+1176.534748695" Dec 05 15:18:18 crc kubenswrapper[4840]: I1205 15:18:18.206967 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-65db99bfb4-5mjgn" event={"ID":"15fbee23-d910-410f-ae76-91c967143dca","Type":"ContainerStarted","Data":"4904b238ed6aa1efa8cf090e2250222b04c10bb19ca0ea5574326ad17148c3a9"} Dec 05 15:18:18 crc kubenswrapper[4840]: I1205 15:18:18.207032 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-65db99bfb4-5mjgn" event={"ID":"15fbee23-d910-410f-ae76-91c967143dca","Type":"ContainerStarted","Data":"f070c57e27ea20004c70c105a1f4692127a82b413a3acac42ee85f84b20df5d5"} Dec 05 15:18:18 crc kubenswrapper[4840]: I1205 15:18:18.207046 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-65db99bfb4-5mjgn" event={"ID":"15fbee23-d910-410f-ae76-91c967143dca","Type":"ContainerStarted","Data":"4f8a8f04509fce79d9c7415f91da743c1660732807eb3ad63e826b04c4263ef2"} Dec 05 15:18:18 crc kubenswrapper[4840]: I1205 15:18:18.214547 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7b99464548-lx7k9" podStartSLOduration=25.355918981 podStartE2EDuration="26.214530721s" podCreationTimestamp="2025-12-05 15:17:52 +0000 UTC" firstStartedPulling="2025-12-05 15:18:15.041564735 +0000 UTC m=+1173.382627349" lastFinishedPulling="2025-12-05 15:18:15.900176475 +0000 UTC m=+1174.241239089" observedRunningTime="2025-12-05 15:18:18.214194722 +0000 UTC m=+1176.555257336" watchObservedRunningTime="2025-12-05 15:18:18.214530721 +0000 UTC m=+1176.555593335" Dec 05 15:18:18 crc kubenswrapper[4840]: I1205 15:18:18.242672 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-65db99bfb4-5mjgn" podStartSLOduration=3.242651488 podStartE2EDuration="3.242651488s" podCreationTimestamp="2025-12-05 15:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:18:18.230230236 +0000 UTC m=+1176.571292850" watchObservedRunningTime="2025-12-05 15:18:18.242651488 +0000 UTC m=+1176.583714102" Dec 05 15:18:18 crc kubenswrapper[4840]: I1205 15:18:18.920839 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6d557dcbfc-24qsg"] Dec 05 15:18:18 crc kubenswrapper[4840]: I1205 15:18:18.922735 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:18 crc kubenswrapper[4840]: I1205 15:18:18.924858 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 05 15:18:18 crc kubenswrapper[4840]: I1205 15:18:18.925089 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 05 15:18:18 crc kubenswrapper[4840]: I1205 15:18:18.935989 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6d557dcbfc-24qsg"] Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.100605 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3f464dff-9cae-4492-9e99-7d0343ecefbe-config\") pod \"neutron-6d557dcbfc-24qsg\" (UID: \"3f464dff-9cae-4492-9e99-7d0343ecefbe\") " pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.101031 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f464dff-9cae-4492-9e99-7d0343ecefbe-internal-tls-certs\") pod \"neutron-6d557dcbfc-24qsg\" (UID: \"3f464dff-9cae-4492-9e99-7d0343ecefbe\") " pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.101122 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3f464dff-9cae-4492-9e99-7d0343ecefbe-httpd-config\") pod \"neutron-6d557dcbfc-24qsg\" (UID: \"3f464dff-9cae-4492-9e99-7d0343ecefbe\") " pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.101147 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f464dff-9cae-4492-9e99-7d0343ecefbe-combined-ca-bundle\") pod \"neutron-6d557dcbfc-24qsg\" (UID: \"3f464dff-9cae-4492-9e99-7d0343ecefbe\") " pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.101193 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f464dff-9cae-4492-9e99-7d0343ecefbe-ovndb-tls-certs\") pod \"neutron-6d557dcbfc-24qsg\" (UID: \"3f464dff-9cae-4492-9e99-7d0343ecefbe\") " pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.101229 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f464dff-9cae-4492-9e99-7d0343ecefbe-public-tls-certs\") pod \"neutron-6d557dcbfc-24qsg\" (UID: \"3f464dff-9cae-4492-9e99-7d0343ecefbe\") " pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.101259 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmt7s\" (UniqueName: \"kubernetes.io/projected/3f464dff-9cae-4492-9e99-7d0343ecefbe-kube-api-access-dmt7s\") pod \"neutron-6d557dcbfc-24qsg\" (UID: \"3f464dff-9cae-4492-9e99-7d0343ecefbe\") " pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.202822 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: 
\"kubernetes.io/secret/3f464dff-9cae-4492-9e99-7d0343ecefbe-httpd-config\") pod \"neutron-6d557dcbfc-24qsg\" (UID: \"3f464dff-9cae-4492-9e99-7d0343ecefbe\") " pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.202907 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f464dff-9cae-4492-9e99-7d0343ecefbe-combined-ca-bundle\") pod \"neutron-6d557dcbfc-24qsg\" (UID: \"3f464dff-9cae-4492-9e99-7d0343ecefbe\") " pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.202983 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f464dff-9cae-4492-9e99-7d0343ecefbe-ovndb-tls-certs\") pod \"neutron-6d557dcbfc-24qsg\" (UID: \"3f464dff-9cae-4492-9e99-7d0343ecefbe\") " pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.203068 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f464dff-9cae-4492-9e99-7d0343ecefbe-public-tls-certs\") pod \"neutron-6d557dcbfc-24qsg\" (UID: \"3f464dff-9cae-4492-9e99-7d0343ecefbe\") " pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.203090 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmt7s\" (UniqueName: \"kubernetes.io/projected/3f464dff-9cae-4492-9e99-7d0343ecefbe-kube-api-access-dmt7s\") pod \"neutron-6d557dcbfc-24qsg\" (UID: \"3f464dff-9cae-4492-9e99-7d0343ecefbe\") " pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.203181 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3f464dff-9cae-4492-9e99-7d0343ecefbe-config\") pod \"neutron-6d557dcbfc-24qsg\" (UID: \"3f464dff-9cae-4492-9e99-7d0343ecefbe\") " pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.203246 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f464dff-9cae-4492-9e99-7d0343ecefbe-internal-tls-certs\") pod \"neutron-6d557dcbfc-24qsg\" (UID: \"3f464dff-9cae-4492-9e99-7d0343ecefbe\") " pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.216057 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f464dff-9cae-4492-9e99-7d0343ecefbe-internal-tls-certs\") pod \"neutron-6d557dcbfc-24qsg\" (UID: \"3f464dff-9cae-4492-9e99-7d0343ecefbe\") " pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.216581 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f464dff-9cae-4492-9e99-7d0343ecefbe-public-tls-certs\") pod \"neutron-6d557dcbfc-24qsg\" (UID: \"3f464dff-9cae-4492-9e99-7d0343ecefbe\") " pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.217056 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3f464dff-9cae-4492-9e99-7d0343ecefbe-httpd-config\") pod \"neutron-6d557dcbfc-24qsg\" (UID: 
\"3f464dff-9cae-4492-9e99-7d0343ecefbe\") " pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.220073 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f464dff-9cae-4492-9e99-7d0343ecefbe-ovndb-tls-certs\") pod \"neutron-6d557dcbfc-24qsg\" (UID: \"3f464dff-9cae-4492-9e99-7d0343ecefbe\") " pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.220834 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/3f464dff-9cae-4492-9e99-7d0343ecefbe-config\") pod \"neutron-6d557dcbfc-24qsg\" (UID: \"3f464dff-9cae-4492-9e99-7d0343ecefbe\") " pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.227929 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmt7s\" (UniqueName: \"kubernetes.io/projected/3f464dff-9cae-4492-9e99-7d0343ecefbe-kube-api-access-dmt7s\") pod \"neutron-6d557dcbfc-24qsg\" (UID: \"3f464dff-9cae-4492-9e99-7d0343ecefbe\") " pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.228265 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f464dff-9cae-4492-9e99-7d0343ecefbe-combined-ca-bundle\") pod \"neutron-6d557dcbfc-24qsg\" (UID: \"3f464dff-9cae-4492-9e99-7d0343ecefbe\") " pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.234077 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"231dd866-8fc7-4d81-a391-8cfc74561bd6","Type":"ContainerStarted","Data":"7caadfdbb2233ff8d9af4ed20fed744814ac639a14bb366f509bc793300360b8"} Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.241096 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3c46fc46-e681-4b04-8367-3a9f6e0283a6","Type":"ContainerStarted","Data":"64af2d24858b2beb279e44aa97a9422ef24169572f22c557b1db4008a94b96ed"} Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.270225 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" event={"ID":"e7360138-33f0-4582-86d8-4eced23bb05b","Type":"ContainerStarted","Data":"1822e2ad6028e2ca5c08e888e8eda32e67a7addb69d98f2ebd705cc067aaa107"} Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.271327 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.272712 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-65db99bfb4-5mjgn" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.283042 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.283021033 podStartE2EDuration="5.283021033s" podCreationTimestamp="2025-12-05 15:18:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:18:19.271678221 +0000 UTC m=+1177.612740835" watchObservedRunningTime="2025-12-05 15:18:19.283021033 +0000 UTC m=+1177.624083637" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.292207 4840 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" podStartSLOduration=4.292190322 podStartE2EDuration="4.292190322s" podCreationTimestamp="2025-12-05 15:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:18:19.289225928 +0000 UTC m=+1177.630288542" watchObservedRunningTime="2025-12-05 15:18:19.292190322 +0000 UTC m=+1177.633252936" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.317897 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.471668 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.471723 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.471768 4840 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.472377 4840 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"40a488d86bcc9fa72aa671ce4746fbea89ae7ae377bf2c9aff3cd9df6bf2d02c"} pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 15:18:19 crc kubenswrapper[4840]: I1205 15:18:19.472427 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" containerID="cri-o://40a488d86bcc9fa72aa671ce4746fbea89ae7ae377bf2c9aff3cd9df6bf2d02c" gracePeriod=600 Dec 05 15:18:20 crc kubenswrapper[4840]: I1205 15:18:20.645390 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6d557dcbfc-24qsg"] Dec 05 15:18:21 crc kubenswrapper[4840]: I1205 15:18:21.477046 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6d557dcbfc-24qsg" event={"ID":"3f464dff-9cae-4492-9e99-7d0343ecefbe","Type":"ContainerStarted","Data":"dc14874c0dbbc369417d6661905d8dbe1d54660752109a97ec214e42f77eca9f"} Dec 05 15:18:21 crc kubenswrapper[4840]: I1205 15:18:21.477657 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6d557dcbfc-24qsg" event={"ID":"3f464dff-9cae-4492-9e99-7d0343ecefbe","Type":"ContainerStarted","Data":"e5b46c6ad45ed52e512ec2224c8c0bb534ade859843d3561a30d15c6a278965a"} Dec 05 15:18:21 crc kubenswrapper[4840]: I1205 15:18:21.480450 4840 generic.go:334] "Generic (PLEG): container finished" podID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerID="40a488d86bcc9fa72aa671ce4746fbea89ae7ae377bf2c9aff3cd9df6bf2d02c" exitCode=0 Dec 05 15:18:21 crc kubenswrapper[4840]: I1205 15:18:21.481711 4840 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerDied","Data":"40a488d86bcc9fa72aa671ce4746fbea89ae7ae377bf2c9aff3cd9df6bf2d02c"} Dec 05 15:18:21 crc kubenswrapper[4840]: I1205 15:18:21.481740 4840 scope.go:117] "RemoveContainer" containerID="d69a7eb1c40b94e45105da8261bb07a2f04367f59caef02ba4c0e3aa6dc28e33" Dec 05 15:18:22 crc kubenswrapper[4840]: I1205 15:18:22.434417 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:18:22 crc kubenswrapper[4840]: I1205 15:18:22.434826 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:18:22 crc kubenswrapper[4840]: I1205 15:18:22.502176 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"231dd866-8fc7-4d81-a391-8cfc74561bd6","Type":"ContainerStarted","Data":"f7c846a940e475e325e21699b3a1056c61b5233111e0b0b7c113450ca39dae31"} Dec 05 15:18:22 crc kubenswrapper[4840]: I1205 15:18:22.511071 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6d557dcbfc-24qsg" event={"ID":"3f464dff-9cae-4492-9e99-7d0343ecefbe","Type":"ContainerStarted","Data":"ba6a3a37b663285e82df84c63064b9090f85b6fc303d40dd02ac12da0a8acf24"} Dec 05 15:18:22 crc kubenswrapper[4840]: I1205 15:18:22.511914 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:22 crc kubenswrapper[4840]: I1205 15:18:22.530020 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=7.529997063 podStartE2EDuration="7.529997063s" podCreationTimestamp="2025-12-05 15:18:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:18:22.52460516 +0000 UTC m=+1180.865667774" watchObservedRunningTime="2025-12-05 15:18:22.529997063 +0000 UTC m=+1180.871059677" Dec 05 15:18:22 crc kubenswrapper[4840]: I1205 15:18:22.539097 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:18:22 crc kubenswrapper[4840]: I1205 15:18:22.539950 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:18:22 crc kubenswrapper[4840]: I1205 15:18:22.568976 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6d557dcbfc-24qsg" podStartSLOduration=4.568958156 podStartE2EDuration="4.568958156s" podCreationTimestamp="2025-12-05 15:18:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:18:22.559007614 +0000 UTC m=+1180.900070228" watchObservedRunningTime="2025-12-05 15:18:22.568958156 +0000 UTC m=+1180.910020770" Dec 05 15:18:24 crc kubenswrapper[4840]: I1205 15:18:24.567156 4840 generic.go:334] "Generic (PLEG): container finished" podID="1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705" containerID="48529d1c7e0e70882af343ccb984e6ec4a3d9734cd0bcbdc9f334b455947d1a7" exitCode=0 Dec 05 15:18:24 crc kubenswrapper[4840]: I1205 15:18:24.567240 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-4bjv4" 
event={"ID":"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705","Type":"ContainerDied","Data":"48529d1c7e0e70882af343ccb984e6ec4a3d9734cd0bcbdc9f334b455947d1a7"} Dec 05 15:18:24 crc kubenswrapper[4840]: I1205 15:18:24.569915 4840 generic.go:334] "Generic (PLEG): container finished" podID="dab20e36-d38f-4c5f-9d42-028c9df5ca51" containerID="53024aebaef310348e49018d50f7757dfd8bc4eee143e1f7bdc94038d4eacdac" exitCode=0 Dec 05 15:18:24 crc kubenswrapper[4840]: I1205 15:18:24.569983 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-dfgrm" event={"ID":"dab20e36-d38f-4c5f-9d42-028c9df5ca51","Type":"ContainerDied","Data":"53024aebaef310348e49018d50f7757dfd8bc4eee143e1f7bdc94038d4eacdac"} Dec 05 15:18:25 crc kubenswrapper[4840]: I1205 15:18:25.304457 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 15:18:25 crc kubenswrapper[4840]: I1205 15:18:25.304794 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 15:18:25 crc kubenswrapper[4840]: I1205 15:18:25.346409 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 15:18:25 crc kubenswrapper[4840]: I1205 15:18:25.352660 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 15:18:25 crc kubenswrapper[4840]: I1205 15:18:25.579366 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 15:18:25 crc kubenswrapper[4840]: I1205 15:18:25.579428 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 15:18:25 crc kubenswrapper[4840]: I1205 15:18:25.945991 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.023537 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-8jg2f"] Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.023806 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" podUID="7c611c21-c0e0-4af2-a5a7-195807a6fcfa" containerName="dnsmasq-dns" containerID="cri-o://1e626ca15f78583df8491eb4b1e3e042fa53b4bdbe9a9e625733b1b9bd2b0168" gracePeriod=10 Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.374436 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-4bjv4" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.403528 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-dfgrm" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.492058 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m6p8p\" (UniqueName: \"kubernetes.io/projected/dab20e36-d38f-4c5f-9d42-028c9df5ca51-kube-api-access-m6p8p\") pod \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\" (UID: \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\") " Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.492642 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-scripts\") pod \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\" (UID: \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\") " Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.492704 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dab20e36-d38f-4c5f-9d42-028c9df5ca51-logs\") pod \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\" (UID: \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\") " Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.492761 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-fernet-keys\") pod \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\" (UID: \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\") " Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.492786 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-config-data\") pod \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\" (UID: \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\") " Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.492807 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vcq8n\" (UniqueName: \"kubernetes.io/projected/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-kube-api-access-vcq8n\") pod \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\" (UID: \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\") " Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.492839 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-combined-ca-bundle\") pod \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\" (UID: \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\") " Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.492876 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dab20e36-d38f-4c5f-9d42-028c9df5ca51-combined-ca-bundle\") pod \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\" (UID: \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\") " Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.492906 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dab20e36-d38f-4c5f-9d42-028c9df5ca51-config-data\") pod \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\" (UID: \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\") " Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.492928 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-credential-keys\") pod \"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\" (UID: 
\"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705\") " Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.493023 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dab20e36-d38f-4c5f-9d42-028c9df5ca51-scripts\") pod \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\" (UID: \"dab20e36-d38f-4c5f-9d42-028c9df5ca51\") " Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.493680 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dab20e36-d38f-4c5f-9d42-028c9df5ca51-logs" (OuterVolumeSpecName: "logs") pod "dab20e36-d38f-4c5f-9d42-028c9df5ca51" (UID: "dab20e36-d38f-4c5f-9d42-028c9df5ca51"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.494160 4840 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dab20e36-d38f-4c5f-9d42-028c9df5ca51-logs\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.508362 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-scripts" (OuterVolumeSpecName: "scripts") pod "1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705" (UID: "1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.509784 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705" (UID: "1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.511031 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705" (UID: "1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.514606 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dab20e36-d38f-4c5f-9d42-028c9df5ca51-kube-api-access-m6p8p" (OuterVolumeSpecName: "kube-api-access-m6p8p") pod "dab20e36-d38f-4c5f-9d42-028c9df5ca51" (UID: "dab20e36-d38f-4c5f-9d42-028c9df5ca51"). InnerVolumeSpecName "kube-api-access-m6p8p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.519052 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-kube-api-access-vcq8n" (OuterVolumeSpecName: "kube-api-access-vcq8n") pod "1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705" (UID: "1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705"). InnerVolumeSpecName "kube-api-access-vcq8n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.519194 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dab20e36-d38f-4c5f-9d42-028c9df5ca51-scripts" (OuterVolumeSpecName: "scripts") pod "dab20e36-d38f-4c5f-9d42-028c9df5ca51" (UID: "dab20e36-d38f-4c5f-9d42-028c9df5ca51"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.542561 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dab20e36-d38f-4c5f-9d42-028c9df5ca51-config-data" (OuterVolumeSpecName: "config-data") pod "dab20e36-d38f-4c5f-9d42-028c9df5ca51" (UID: "dab20e36-d38f-4c5f-9d42-028c9df5ca51"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.562081 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-config-data" (OuterVolumeSpecName: "config-data") pod "1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705" (UID: "1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.596988 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerStarted","Data":"40055bc2420888638e5359189b9a9948a0cbd2aa70e5300e22d9c614bb6d6f19"} Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.597115 4840 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.597159 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.597169 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vcq8n\" (UniqueName: \"kubernetes.io/projected/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-kube-api-access-vcq8n\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.597179 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dab20e36-d38f-4c5f-9d42-028c9df5ca51-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.597187 4840 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.597195 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dab20e36-d38f-4c5f-9d42-028c9df5ca51-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.597223 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m6p8p\" (UniqueName: \"kubernetes.io/projected/dab20e36-d38f-4c5f-9d42-028c9df5ca51-kube-api-access-m6p8p\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:26 crc 
kubenswrapper[4840]: I1205 15:18:26.597232 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.603098 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dab20e36-d38f-4c5f-9d42-028c9df5ca51-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dab20e36-d38f-4c5f-9d42-028c9df5ca51" (UID: "dab20e36-d38f-4c5f-9d42-028c9df5ca51"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.618405 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a351148-31b1-402a-a8f5-0f26f81fddef","Type":"ContainerStarted","Data":"adfee7236fefc58166b18948e7fb8930431957f89c8f13358d96962c6f1a4fde"} Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.618630 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.620016 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705" (UID: "1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.631146 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-4bjv4" event={"ID":"1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705","Type":"ContainerDied","Data":"9ae9aa166f48428c079cee1263ce1644ce6aa6b128175a999f9c53aaf01f4813"} Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.631187 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ae9aa166f48428c079cee1263ce1644ce6aa6b128175a999f9c53aaf01f4813" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.631251 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-4bjv4" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.641239 4840 generic.go:334] "Generic (PLEG): container finished" podID="7c611c21-c0e0-4af2-a5a7-195807a6fcfa" containerID="1e626ca15f78583df8491eb4b1e3e042fa53b4bdbe9a9e625733b1b9bd2b0168" exitCode=0 Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.641363 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.641742 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-8jg2f" event={"ID":"7c611c21-c0e0-4af2-a5a7-195807a6fcfa","Type":"ContainerDied","Data":"1e626ca15f78583df8491eb4b1e3e042fa53b4bdbe9a9e625733b1b9bd2b0168"} Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.641777 4840 scope.go:117] "RemoveContainer" containerID="1e626ca15f78583df8491eb4b1e3e042fa53b4bdbe9a9e625733b1b9bd2b0168" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.653433 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-dfgrm" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.654949 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-dfgrm" event={"ID":"dab20e36-d38f-4c5f-9d42-028c9df5ca51","Type":"ContainerDied","Data":"ab0e1190c85519f927dcda003f61da4aaff3fac944c4f43b264a510f53c0802f"} Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.654996 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ab0e1190c85519f927dcda003f61da4aaff3fac944c4f43b264a510f53c0802f" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.658695 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.658727 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.700238 4840 scope.go:117] "RemoveContainer" containerID="2efc93813c355b880154dfa4921ded0b1a54c5748e138457ab48162127ee44c2" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.701351 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.701373 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dab20e36-d38f-4c5f-9d42-028c9df5ca51-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.716103 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.741565 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.784083 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7b75bf6fbf-wj8dt"] Dec 05 15:18:26 crc kubenswrapper[4840]: E1205 15:18:26.784616 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c611c21-c0e0-4af2-a5a7-195807a6fcfa" containerName="init" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.784638 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c611c21-c0e0-4af2-a5a7-195807a6fcfa" containerName="init" Dec 05 15:18:26 crc kubenswrapper[4840]: E1205 15:18:26.784651 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c611c21-c0e0-4af2-a5a7-195807a6fcfa" containerName="dnsmasq-dns" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.784660 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c611c21-c0e0-4af2-a5a7-195807a6fcfa" containerName="dnsmasq-dns" Dec 05 15:18:26 crc kubenswrapper[4840]: E1205 15:18:26.784669 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705" containerName="keystone-bootstrap" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.784678 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705" containerName="keystone-bootstrap" Dec 05 15:18:26 crc kubenswrapper[4840]: E1205 15:18:26.784712 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dab20e36-d38f-4c5f-9d42-028c9df5ca51" 
containerName="placement-db-sync" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.784720 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="dab20e36-d38f-4c5f-9d42-028c9df5ca51" containerName="placement-db-sync" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.784989 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705" containerName="keystone-bootstrap" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.785013 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c611c21-c0e0-4af2-a5a7-195807a6fcfa" containerName="dnsmasq-dns" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.785035 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="dab20e36-d38f-4c5f-9d42-028c9df5ca51" containerName="placement-db-sync" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.785736 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.789393 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.789586 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.789738 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.789846 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-6bz6r" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.789976 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.790104 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.800637 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7b75bf6fbf-wj8dt"] Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.802658 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-ovsdbserver-nb\") pod \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\" (UID: \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\") " Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.802746 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-config\") pod \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\" (UID: \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\") " Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.802904 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-ovsdbserver-sb\") pod \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\" (UID: \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\") " Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.802958 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-dns-swift-storage-0\") pod \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\" (UID: 
\"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\") " Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.803102 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-dns-svc\") pod \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\" (UID: \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\") " Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.803142 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8bwgj\" (UniqueName: \"kubernetes.io/projected/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-kube-api-access-8bwgj\") pod \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\" (UID: \"7c611c21-c0e0-4af2-a5a7-195807a6fcfa\") " Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.820724 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-kube-api-access-8bwgj" (OuterVolumeSpecName: "kube-api-access-8bwgj") pod "7c611c21-c0e0-4af2-a5a7-195807a6fcfa" (UID: "7c611c21-c0e0-4af2-a5a7-195807a6fcfa"). InnerVolumeSpecName "kube-api-access-8bwgj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.862586 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-5d878656b-vljqr"] Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.897582 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.903518 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.903705 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.903832 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-mkgbz" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.904006 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.904152 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.917121 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7c611c21-c0e0-4af2-a5a7-195807a6fcfa" (UID: "7c611c21-c0e0-4af2-a5a7-195807a6fcfa"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.918056 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-config" (OuterVolumeSpecName: "config") pod "7c611c21-c0e0-4af2-a5a7-195807a6fcfa" (UID: "7c611c21-c0e0-4af2-a5a7-195807a6fcfa"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.926467 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c98e01c4-b177-45c9-9f0b-bd02f90fe5d2-internal-tls-certs\") pod \"keystone-7b75bf6fbf-wj8dt\" (UID: \"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2\") " pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.926574 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c98e01c4-b177-45c9-9f0b-bd02f90fe5d2-scripts\") pod \"keystone-7b75bf6fbf-wj8dt\" (UID: \"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2\") " pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.926699 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtnbb\" (UniqueName: \"kubernetes.io/projected/c98e01c4-b177-45c9-9f0b-bd02f90fe5d2-kube-api-access-xtnbb\") pod \"keystone-7b75bf6fbf-wj8dt\" (UID: \"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2\") " pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.926734 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c98e01c4-b177-45c9-9f0b-bd02f90fe5d2-config-data\") pod \"keystone-7b75bf6fbf-wj8dt\" (UID: \"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2\") " pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.926802 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c98e01c4-b177-45c9-9f0b-bd02f90fe5d2-fernet-keys\") pod \"keystone-7b75bf6fbf-wj8dt\" (UID: \"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2\") " pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.926828 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c98e01c4-b177-45c9-9f0b-bd02f90fe5d2-public-tls-certs\") pod \"keystone-7b75bf6fbf-wj8dt\" (UID: \"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2\") " pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.926847 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c98e01c4-b177-45c9-9f0b-bd02f90fe5d2-combined-ca-bundle\") pod \"keystone-7b75bf6fbf-wj8dt\" (UID: \"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2\") " pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.926918 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c98e01c4-b177-45c9-9f0b-bd02f90fe5d2-credential-keys\") pod \"keystone-7b75bf6fbf-wj8dt\" (UID: \"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2\") " pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.926988 4840 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:26 crc 
kubenswrapper[4840]: I1205 15:18:26.927002 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8bwgj\" (UniqueName: \"kubernetes.io/projected/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-kube-api-access-8bwgj\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.927013 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-config\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.936215 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7c611c21-c0e0-4af2-a5a7-195807a6fcfa" (UID: "7c611c21-c0e0-4af2-a5a7-195807a6fcfa"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.947911 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7c611c21-c0e0-4af2-a5a7-195807a6fcfa" (UID: "7c611c21-c0e0-4af2-a5a7-195807a6fcfa"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.947978 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5d878656b-vljqr"] Dec 05 15:18:26 crc kubenswrapper[4840]: I1205 15:18:26.974484 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7c611c21-c0e0-4af2-a5a7-195807a6fcfa" (UID: "7c611c21-c0e0-4af2-a5a7-195807a6fcfa"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.029039 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc5ac202-1b33-4a65-aab2-d5fe6e62f844-combined-ca-bundle\") pod \"placement-5d878656b-vljqr\" (UID: \"dc5ac202-1b33-4a65-aab2-d5fe6e62f844\") " pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.029109 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c98e01c4-b177-45c9-9f0b-bd02f90fe5d2-credential-keys\") pod \"keystone-7b75bf6fbf-wj8dt\" (UID: \"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2\") " pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.029146 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c98e01c4-b177-45c9-9f0b-bd02f90fe5d2-internal-tls-certs\") pod \"keystone-7b75bf6fbf-wj8dt\" (UID: \"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2\") " pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.029203 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc5ac202-1b33-4a65-aab2-d5fe6e62f844-config-data\") pod \"placement-5d878656b-vljqr\" (UID: \"dc5ac202-1b33-4a65-aab2-d5fe6e62f844\") " pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.029235 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c98e01c4-b177-45c9-9f0b-bd02f90fe5d2-scripts\") pod \"keystone-7b75bf6fbf-wj8dt\" (UID: \"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2\") " pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.029280 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc5ac202-1b33-4a65-aab2-d5fe6e62f844-logs\") pod \"placement-5d878656b-vljqr\" (UID: \"dc5ac202-1b33-4a65-aab2-d5fe6e62f844\") " pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.029306 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc5ac202-1b33-4a65-aab2-d5fe6e62f844-internal-tls-certs\") pod \"placement-5d878656b-vljqr\" (UID: \"dc5ac202-1b33-4a65-aab2-d5fe6e62f844\") " pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.029347 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9j2dp\" (UniqueName: \"kubernetes.io/projected/dc5ac202-1b33-4a65-aab2-d5fe6e62f844-kube-api-access-9j2dp\") pod \"placement-5d878656b-vljqr\" (UID: \"dc5ac202-1b33-4a65-aab2-d5fe6e62f844\") " pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.029365 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc5ac202-1b33-4a65-aab2-d5fe6e62f844-public-tls-certs\") pod \"placement-5d878656b-vljqr\" (UID: 
\"dc5ac202-1b33-4a65-aab2-d5fe6e62f844\") " pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.029393 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtnbb\" (UniqueName: \"kubernetes.io/projected/c98e01c4-b177-45c9-9f0b-bd02f90fe5d2-kube-api-access-xtnbb\") pod \"keystone-7b75bf6fbf-wj8dt\" (UID: \"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2\") " pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.029414 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c98e01c4-b177-45c9-9f0b-bd02f90fe5d2-config-data\") pod \"keystone-7b75bf6fbf-wj8dt\" (UID: \"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2\") " pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.029455 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc5ac202-1b33-4a65-aab2-d5fe6e62f844-scripts\") pod \"placement-5d878656b-vljqr\" (UID: \"dc5ac202-1b33-4a65-aab2-d5fe6e62f844\") " pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.029480 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c98e01c4-b177-45c9-9f0b-bd02f90fe5d2-fernet-keys\") pod \"keystone-7b75bf6fbf-wj8dt\" (UID: \"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2\") " pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.029501 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c98e01c4-b177-45c9-9f0b-bd02f90fe5d2-public-tls-certs\") pod \"keystone-7b75bf6fbf-wj8dt\" (UID: \"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2\") " pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.029519 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c98e01c4-b177-45c9-9f0b-bd02f90fe5d2-combined-ca-bundle\") pod \"keystone-7b75bf6fbf-wj8dt\" (UID: \"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2\") " pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.029562 4840 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.029574 4840 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.029584 4840 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7c611c21-c0e0-4af2-a5a7-195807a6fcfa-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.034661 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c98e01c4-b177-45c9-9f0b-bd02f90fe5d2-combined-ca-bundle\") pod \"keystone-7b75bf6fbf-wj8dt\" (UID: \"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2\") " 
pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.037162 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c98e01c4-b177-45c9-9f0b-bd02f90fe5d2-scripts\") pod \"keystone-7b75bf6fbf-wj8dt\" (UID: \"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2\") " pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.041630 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c98e01c4-b177-45c9-9f0b-bd02f90fe5d2-credential-keys\") pod \"keystone-7b75bf6fbf-wj8dt\" (UID: \"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2\") " pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.041723 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c98e01c4-b177-45c9-9f0b-bd02f90fe5d2-fernet-keys\") pod \"keystone-7b75bf6fbf-wj8dt\" (UID: \"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2\") " pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.042032 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c98e01c4-b177-45c9-9f0b-bd02f90fe5d2-public-tls-certs\") pod \"keystone-7b75bf6fbf-wj8dt\" (UID: \"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2\") " pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.042192 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c98e01c4-b177-45c9-9f0b-bd02f90fe5d2-internal-tls-certs\") pod \"keystone-7b75bf6fbf-wj8dt\" (UID: \"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2\") " pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.042966 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c98e01c4-b177-45c9-9f0b-bd02f90fe5d2-config-data\") pod \"keystone-7b75bf6fbf-wj8dt\" (UID: \"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2\") " pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.056249 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtnbb\" (UniqueName: \"kubernetes.io/projected/c98e01c4-b177-45c9-9f0b-bd02f90fe5d2-kube-api-access-xtnbb\") pod \"keystone-7b75bf6fbf-wj8dt\" (UID: \"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2\") " pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.131522 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc5ac202-1b33-4a65-aab2-d5fe6e62f844-scripts\") pod \"placement-5d878656b-vljqr\" (UID: \"dc5ac202-1b33-4a65-aab2-d5fe6e62f844\") " pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.131598 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc5ac202-1b33-4a65-aab2-d5fe6e62f844-combined-ca-bundle\") pod \"placement-5d878656b-vljqr\" (UID: \"dc5ac202-1b33-4a65-aab2-d5fe6e62f844\") " pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.131678 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/dc5ac202-1b33-4a65-aab2-d5fe6e62f844-config-data\") pod \"placement-5d878656b-vljqr\" (UID: \"dc5ac202-1b33-4a65-aab2-d5fe6e62f844\") " pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.131932 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.132155 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc5ac202-1b33-4a65-aab2-d5fe6e62f844-logs\") pod \"placement-5d878656b-vljqr\" (UID: \"dc5ac202-1b33-4a65-aab2-d5fe6e62f844\") " pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.132634 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc5ac202-1b33-4a65-aab2-d5fe6e62f844-logs\") pod \"placement-5d878656b-vljqr\" (UID: \"dc5ac202-1b33-4a65-aab2-d5fe6e62f844\") " pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.132674 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc5ac202-1b33-4a65-aab2-d5fe6e62f844-internal-tls-certs\") pod \"placement-5d878656b-vljqr\" (UID: \"dc5ac202-1b33-4a65-aab2-d5fe6e62f844\") " pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.133050 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9j2dp\" (UniqueName: \"kubernetes.io/projected/dc5ac202-1b33-4a65-aab2-d5fe6e62f844-kube-api-access-9j2dp\") pod \"placement-5d878656b-vljqr\" (UID: \"dc5ac202-1b33-4a65-aab2-d5fe6e62f844\") " pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.133079 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc5ac202-1b33-4a65-aab2-d5fe6e62f844-public-tls-certs\") pod \"placement-5d878656b-vljqr\" (UID: \"dc5ac202-1b33-4a65-aab2-d5fe6e62f844\") " pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.134654 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc5ac202-1b33-4a65-aab2-d5fe6e62f844-scripts\") pod \"placement-5d878656b-vljqr\" (UID: \"dc5ac202-1b33-4a65-aab2-d5fe6e62f844\") " pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.135372 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc5ac202-1b33-4a65-aab2-d5fe6e62f844-combined-ca-bundle\") pod \"placement-5d878656b-vljqr\" (UID: \"dc5ac202-1b33-4a65-aab2-d5fe6e62f844\") " pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.136513 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc5ac202-1b33-4a65-aab2-d5fe6e62f844-public-tls-certs\") pod \"placement-5d878656b-vljqr\" (UID: \"dc5ac202-1b33-4a65-aab2-d5fe6e62f844\") " pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.137818 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/dc5ac202-1b33-4a65-aab2-d5fe6e62f844-config-data\") pod \"placement-5d878656b-vljqr\" (UID: \"dc5ac202-1b33-4a65-aab2-d5fe6e62f844\") " pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.155264 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc5ac202-1b33-4a65-aab2-d5fe6e62f844-internal-tls-certs\") pod \"placement-5d878656b-vljqr\" (UID: \"dc5ac202-1b33-4a65-aab2-d5fe6e62f844\") " pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.164536 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9j2dp\" (UniqueName: \"kubernetes.io/projected/dc5ac202-1b33-4a65-aab2-d5fe6e62f844-kube-api-access-9j2dp\") pod \"placement-5d878656b-vljqr\" (UID: \"dc5ac202-1b33-4a65-aab2-d5fe6e62f844\") " pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.215452 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.386776 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-8jg2f"] Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.409206 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-8jg2f"] Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.678922 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7b75bf6fbf-wj8dt"] Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.684575 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.684893 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 15:18:27 crc kubenswrapper[4840]: I1205 15:18:27.961721 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5d878656b-vljqr"] Dec 05 15:18:28 crc kubenswrapper[4840]: I1205 15:18:28.102772 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c611c21-c0e0-4af2-a5a7-195807a6fcfa" path="/var/lib/kubelet/pods/7c611c21-c0e0-4af2-a5a7-195807a6fcfa/volumes" Dec 05 15:18:28 crc kubenswrapper[4840]: I1205 15:18:28.395211 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 15:18:28 crc kubenswrapper[4840]: I1205 15:18:28.395761 4840 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 15:18:28 crc kubenswrapper[4840]: I1205 15:18:28.405879 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 15:18:28 crc kubenswrapper[4840]: I1205 15:18:28.743232 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5d878656b-vljqr" event={"ID":"dc5ac202-1b33-4a65-aab2-d5fe6e62f844","Type":"ContainerStarted","Data":"62d8b0801d88561f83afa587ae27d62d0c02a59b36aac9c4656fc6a7f09b8739"} Dec 05 15:18:28 crc kubenswrapper[4840]: I1205 15:18:28.743549 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5d878656b-vljqr" 
event={"ID":"dc5ac202-1b33-4a65-aab2-d5fe6e62f844","Type":"ContainerStarted","Data":"260f5136792443dd24293f7884ed87534295714f88de0ed89856e5b370b211b6"} Dec 05 15:18:28 crc kubenswrapper[4840]: I1205 15:18:28.746083 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7b75bf6fbf-wj8dt" event={"ID":"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2","Type":"ContainerStarted","Data":"09601ff53da4d85d3139b3b796ce51fb5fb7d73070d71e2803f66cdf2f2c7f61"} Dec 05 15:18:28 crc kubenswrapper[4840]: I1205 15:18:28.746123 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7b75bf6fbf-wj8dt" event={"ID":"c98e01c4-b177-45c9-9f0b-bd02f90fe5d2","Type":"ContainerStarted","Data":"1613d8ce4dfdd67141229238b7204e8bc85a47b5517bf74de72757f7aee47433"} Dec 05 15:18:28 crc kubenswrapper[4840]: I1205 15:18:28.746795 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:28 crc kubenswrapper[4840]: I1205 15:18:28.774464 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-7b75bf6fbf-wj8dt" podStartSLOduration=2.774435621 podStartE2EDuration="2.774435621s" podCreationTimestamp="2025-12-05 15:18:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:18:28.768629406 +0000 UTC m=+1187.109692020" watchObservedRunningTime="2025-12-05 15:18:28.774435621 +0000 UTC m=+1187.115498245" Dec 05 15:18:29 crc kubenswrapper[4840]: I1205 15:18:29.757519 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-5ccg9" event={"ID":"5731646f-d8c6-4bfd-b815-3d68c244d801","Type":"ContainerStarted","Data":"bc012b34148f49f693654bc7d62d6926573b82d7141cfc524069457314892e7f"} Dec 05 15:18:29 crc kubenswrapper[4840]: I1205 15:18:29.765696 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5d878656b-vljqr" event={"ID":"dc5ac202-1b33-4a65-aab2-d5fe6e62f844","Type":"ContainerStarted","Data":"dcb26482a39440da752b3577e7d0b56aa587eb4c2d76d3eebcdc935ec318ab69"} Dec 05 15:18:29 crc kubenswrapper[4840]: I1205 15:18:29.765739 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:29 crc kubenswrapper[4840]: I1205 15:18:29.765803 4840 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 15:18:29 crc kubenswrapper[4840]: I1205 15:18:29.765811 4840 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 15:18:29 crc kubenswrapper[4840]: I1205 15:18:29.767126 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:29 crc kubenswrapper[4840]: I1205 15:18:29.815688 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-5ccg9" podStartSLOduration=3.009278404 podStartE2EDuration="49.81567066s" podCreationTimestamp="2025-12-05 15:17:40 +0000 UTC" firstStartedPulling="2025-12-05 15:17:42.005787683 +0000 UTC m=+1140.346850297" lastFinishedPulling="2025-12-05 15:18:28.812179939 +0000 UTC m=+1187.153242553" observedRunningTime="2025-12-05 15:18:29.787168633 +0000 UTC m=+1188.128231247" watchObservedRunningTime="2025-12-05 15:18:29.81567066 +0000 UTC m=+1188.156733274" Dec 05 15:18:29 crc kubenswrapper[4840]: I1205 15:18:29.818337 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/placement-5d878656b-vljqr" podStartSLOduration=3.818320745 podStartE2EDuration="3.818320745s" podCreationTimestamp="2025-12-05 15:18:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:18:29.813458997 +0000 UTC m=+1188.154521611" watchObservedRunningTime="2025-12-05 15:18:29.818320745 +0000 UTC m=+1188.159383359" Dec 05 15:18:30 crc kubenswrapper[4840]: I1205 15:18:30.968383 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 15:18:30 crc kubenswrapper[4840]: I1205 15:18:30.969106 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 15:18:32 crc kubenswrapper[4840]: I1205 15:18:32.436536 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-67c65cc6bd-s49k5" podUID="15d9aa0d-7948-40b8-a043-e6d36f2a5bb1" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.149:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.149:8443: connect: connection refused" Dec 05 15:18:32 crc kubenswrapper[4840]: I1205 15:18:32.546033 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7b99464548-lx7k9" podUID="d384faa1-3b3b-45f8-bf4b-902236ec40da" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused" Dec 05 15:18:32 crc kubenswrapper[4840]: I1205 15:18:32.912451 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-szwv8" event={"ID":"ae68e2b9-f51d-4486-952d-73c097fbaac4","Type":"ContainerStarted","Data":"0b4a51fac2376cf863d44ecaf5e8beb125f8aaa08e41afa6db8327aa01728bdf"} Dec 05 15:18:32 crc kubenswrapper[4840]: I1205 15:18:32.916196 4840 generic.go:334] "Generic (PLEG): container finished" podID="5731646f-d8c6-4bfd-b815-3d68c244d801" containerID="bc012b34148f49f693654bc7d62d6926573b82d7141cfc524069457314892e7f" exitCode=0 Dec 05 15:18:32 crc kubenswrapper[4840]: I1205 15:18:32.916258 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-5ccg9" event={"ID":"5731646f-d8c6-4bfd-b815-3d68c244d801","Type":"ContainerDied","Data":"bc012b34148f49f693654bc7d62d6926573b82d7141cfc524069457314892e7f"} Dec 05 15:18:32 crc kubenswrapper[4840]: I1205 15:18:32.938525 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-szwv8" podStartSLOduration=4.302368433 podStartE2EDuration="53.938503396s" podCreationTimestamp="2025-12-05 15:17:39 +0000 UTC" firstStartedPulling="2025-12-05 15:17:41.315315404 +0000 UTC m=+1139.656378018" lastFinishedPulling="2025-12-05 15:18:30.951450367 +0000 UTC m=+1189.292512981" observedRunningTime="2025-12-05 15:18:32.931766865 +0000 UTC m=+1191.272829509" watchObservedRunningTime="2025-12-05 15:18:32.938503396 +0000 UTC m=+1191.279566010" Dec 05 15:18:35 crc kubenswrapper[4840]: I1205 15:18:35.804033 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-5ccg9" Dec 05 15:18:35 crc kubenswrapper[4840]: I1205 15:18:35.951346 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-5ccg9" event={"ID":"5731646f-d8c6-4bfd-b815-3d68c244d801","Type":"ContainerDied","Data":"cfc1e130db365a8bab56b07781f2885c5dac04ebb6e6613324f3e4d663e9a6eb"} Dec 05 15:18:35 crc kubenswrapper[4840]: I1205 15:18:35.951390 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cfc1e130db365a8bab56b07781f2885c5dac04ebb6e6613324f3e4d663e9a6eb" Dec 05 15:18:35 crc kubenswrapper[4840]: I1205 15:18:35.951388 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-5ccg9" Dec 05 15:18:36 crc kubenswrapper[4840]: I1205 15:18:36.002402 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f878p\" (UniqueName: \"kubernetes.io/projected/5731646f-d8c6-4bfd-b815-3d68c244d801-kube-api-access-f878p\") pod \"5731646f-d8c6-4bfd-b815-3d68c244d801\" (UID: \"5731646f-d8c6-4bfd-b815-3d68c244d801\") " Dec 05 15:18:36 crc kubenswrapper[4840]: I1205 15:18:36.002484 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5731646f-d8c6-4bfd-b815-3d68c244d801-combined-ca-bundle\") pod \"5731646f-d8c6-4bfd-b815-3d68c244d801\" (UID: \"5731646f-d8c6-4bfd-b815-3d68c244d801\") " Dec 05 15:18:36 crc kubenswrapper[4840]: I1205 15:18:36.002759 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5731646f-d8c6-4bfd-b815-3d68c244d801-db-sync-config-data\") pod \"5731646f-d8c6-4bfd-b815-3d68c244d801\" (UID: \"5731646f-d8c6-4bfd-b815-3d68c244d801\") " Dec 05 15:18:36 crc kubenswrapper[4840]: I1205 15:18:36.032448 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5731646f-d8c6-4bfd-b815-3d68c244d801-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "5731646f-d8c6-4bfd-b815-3d68c244d801" (UID: "5731646f-d8c6-4bfd-b815-3d68c244d801"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:36 crc kubenswrapper[4840]: I1205 15:18:36.036055 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5731646f-d8c6-4bfd-b815-3d68c244d801-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5731646f-d8c6-4bfd-b815-3d68c244d801" (UID: "5731646f-d8c6-4bfd-b815-3d68c244d801"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:36 crc kubenswrapper[4840]: I1205 15:18:36.045538 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5731646f-d8c6-4bfd-b815-3d68c244d801-kube-api-access-f878p" (OuterVolumeSpecName: "kube-api-access-f878p") pod "5731646f-d8c6-4bfd-b815-3d68c244d801" (UID: "5731646f-d8c6-4bfd-b815-3d68c244d801"). InnerVolumeSpecName "kube-api-access-f878p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:18:36 crc kubenswrapper[4840]: I1205 15:18:36.105328 4840 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5731646f-d8c6-4bfd-b815-3d68c244d801-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:36 crc kubenswrapper[4840]: I1205 15:18:36.105375 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f878p\" (UniqueName: \"kubernetes.io/projected/5731646f-d8c6-4bfd-b815-3d68c244d801-kube-api-access-f878p\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:36 crc kubenswrapper[4840]: I1205 15:18:36.105390 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5731646f-d8c6-4bfd-b815-3d68c244d801-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:36 crc kubenswrapper[4840]: E1205 15:18:36.623533 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="7a351148-31b1-402a-a8f5-0f26f81fddef" Dec 05 15:18:36 crc kubenswrapper[4840]: I1205 15:18:36.961848 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a351148-31b1-402a-a8f5-0f26f81fddef","Type":"ContainerStarted","Data":"4962356fe991ffa318ba7d812198dd9cfb24ee68301defa6743104c44f2c4238"} Dec 05 15:18:36 crc kubenswrapper[4840]: I1205 15:18:36.962004 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7a351148-31b1-402a-a8f5-0f26f81fddef" containerName="ceilometer-notification-agent" containerID="cri-o://959c31b1df96cbf3954e3394e2e3259bbf0a197a97f5e49d032d347c25242994" gracePeriod=30 Dec 05 15:18:36 crc kubenswrapper[4840]: I1205 15:18:36.962149 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7a351148-31b1-402a-a8f5-0f26f81fddef" containerName="proxy-httpd" containerID="cri-o://4962356fe991ffa318ba7d812198dd9cfb24ee68301defa6743104c44f2c4238" gracePeriod=30 Dec 05 15:18:36 crc kubenswrapper[4840]: I1205 15:18:36.962202 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7a351148-31b1-402a-a8f5-0f26f81fddef" containerName="sg-core" containerID="cri-o://adfee7236fefc58166b18948e7fb8930431957f89c8f13358d96962c6f1a4fde" gracePeriod=30 Dec 05 15:18:36 crc kubenswrapper[4840]: I1205 15:18:36.962018 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 15:18:36 crc kubenswrapper[4840]: I1205 15:18:36.965083 4840 generic.go:334] "Generic (PLEG): container finished" podID="ae68e2b9-f51d-4486-952d-73c097fbaac4" containerID="0b4a51fac2376cf863d44ecaf5e8beb125f8aaa08e41afa6db8327aa01728bdf" exitCode=0 Dec 05 15:18:36 crc kubenswrapper[4840]: I1205 15:18:36.965347 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-szwv8" event={"ID":"ae68e2b9-f51d-4486-952d-73c097fbaac4","Type":"ContainerDied","Data":"0b4a51fac2376cf863d44ecaf5e8beb125f8aaa08e41afa6db8327aa01728bdf"} Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.163442 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-67bdc7dbc-8r6xf"] Dec 05 15:18:37 crc kubenswrapper[4840]: E1205 15:18:37.164021 4840 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="5731646f-d8c6-4bfd-b815-3d68c244d801" containerName="barbican-db-sync" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.164048 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="5731646f-d8c6-4bfd-b815-3d68c244d801" containerName="barbican-db-sync" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.164293 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="5731646f-d8c6-4bfd-b815-3d68c244d801" containerName="barbican-db-sync" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.165600 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-67bdc7dbc-8r6xf" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.168257 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-nz6z9" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.168377 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.169104 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.207805 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-57cfd5878-qwpfg"] Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.209778 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-57cfd5878-qwpfg" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.216759 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.217695 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-57cfd5878-qwpfg"] Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.249427 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-67bdc7dbc-8r6xf"] Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.251448 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kq2vp\" (UniqueName: \"kubernetes.io/projected/7002d387-4756-4e68-b238-6e9cbf1d9b10-kube-api-access-kq2vp\") pod \"barbican-keystone-listener-57cfd5878-qwpfg\" (UID: \"7002d387-4756-4e68-b238-6e9cbf1d9b10\") " pod="openstack/barbican-keystone-listener-57cfd5878-qwpfg" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.262805 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9670574b-79d6-495e-abc8-123bf1582742-logs\") pod \"barbican-worker-67bdc7dbc-8r6xf\" (UID: \"9670574b-79d6-495e-abc8-123bf1582742\") " pod="openstack/barbican-worker-67bdc7dbc-8r6xf" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.263104 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9670574b-79d6-495e-abc8-123bf1582742-config-data\") pod \"barbican-worker-67bdc7dbc-8r6xf\" (UID: \"9670574b-79d6-495e-abc8-123bf1582742\") " pod="openstack/barbican-worker-67bdc7dbc-8r6xf" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.263230 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/7002d387-4756-4e68-b238-6e9cbf1d9b10-combined-ca-bundle\") pod \"barbican-keystone-listener-57cfd5878-qwpfg\" (UID: \"7002d387-4756-4e68-b238-6e9cbf1d9b10\") " pod="openstack/barbican-keystone-listener-57cfd5878-qwpfg" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.263278 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9670574b-79d6-495e-abc8-123bf1582742-combined-ca-bundle\") pod \"barbican-worker-67bdc7dbc-8r6xf\" (UID: \"9670574b-79d6-495e-abc8-123bf1582742\") " pod="openstack/barbican-worker-67bdc7dbc-8r6xf" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.263326 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmctm\" (UniqueName: \"kubernetes.io/projected/9670574b-79d6-495e-abc8-123bf1582742-kube-api-access-jmctm\") pod \"barbican-worker-67bdc7dbc-8r6xf\" (UID: \"9670574b-79d6-495e-abc8-123bf1582742\") " pod="openstack/barbican-worker-67bdc7dbc-8r6xf" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.263385 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9670574b-79d6-495e-abc8-123bf1582742-config-data-custom\") pod \"barbican-worker-67bdc7dbc-8r6xf\" (UID: \"9670574b-79d6-495e-abc8-123bf1582742\") " pod="openstack/barbican-worker-67bdc7dbc-8r6xf" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.263411 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7002d387-4756-4e68-b238-6e9cbf1d9b10-logs\") pod \"barbican-keystone-listener-57cfd5878-qwpfg\" (UID: \"7002d387-4756-4e68-b238-6e9cbf1d9b10\") " pod="openstack/barbican-keystone-listener-57cfd5878-qwpfg" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.263508 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7002d387-4756-4e68-b238-6e9cbf1d9b10-config-data\") pod \"barbican-keystone-listener-57cfd5878-qwpfg\" (UID: \"7002d387-4756-4e68-b238-6e9cbf1d9b10\") " pod="openstack/barbican-keystone-listener-57cfd5878-qwpfg" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.263632 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7002d387-4756-4e68-b238-6e9cbf1d9b10-config-data-custom\") pod \"barbican-keystone-listener-57cfd5878-qwpfg\" (UID: \"7002d387-4756-4e68-b238-6e9cbf1d9b10\") " pod="openstack/barbican-keystone-listener-57cfd5878-qwpfg" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.415964 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7002d387-4756-4e68-b238-6e9cbf1d9b10-config-data-custom\") pod \"barbican-keystone-listener-57cfd5878-qwpfg\" (UID: \"7002d387-4756-4e68-b238-6e9cbf1d9b10\") " pod="openstack/barbican-keystone-listener-57cfd5878-qwpfg" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.416040 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kq2vp\" (UniqueName: \"kubernetes.io/projected/7002d387-4756-4e68-b238-6e9cbf1d9b10-kube-api-access-kq2vp\") pod \"barbican-keystone-listener-57cfd5878-qwpfg\" (UID: 
\"7002d387-4756-4e68-b238-6e9cbf1d9b10\") " pod="openstack/barbican-keystone-listener-57cfd5878-qwpfg" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.416075 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9670574b-79d6-495e-abc8-123bf1582742-logs\") pod \"barbican-worker-67bdc7dbc-8r6xf\" (UID: \"9670574b-79d6-495e-abc8-123bf1582742\") " pod="openstack/barbican-worker-67bdc7dbc-8r6xf" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.416133 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9670574b-79d6-495e-abc8-123bf1582742-config-data\") pod \"barbican-worker-67bdc7dbc-8r6xf\" (UID: \"9670574b-79d6-495e-abc8-123bf1582742\") " pod="openstack/barbican-worker-67bdc7dbc-8r6xf" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.416228 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7002d387-4756-4e68-b238-6e9cbf1d9b10-combined-ca-bundle\") pod \"barbican-keystone-listener-57cfd5878-qwpfg\" (UID: \"7002d387-4756-4e68-b238-6e9cbf1d9b10\") " pod="openstack/barbican-keystone-listener-57cfd5878-qwpfg" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.416257 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9670574b-79d6-495e-abc8-123bf1582742-combined-ca-bundle\") pod \"barbican-worker-67bdc7dbc-8r6xf\" (UID: \"9670574b-79d6-495e-abc8-123bf1582742\") " pod="openstack/barbican-worker-67bdc7dbc-8r6xf" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.416281 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmctm\" (UniqueName: \"kubernetes.io/projected/9670574b-79d6-495e-abc8-123bf1582742-kube-api-access-jmctm\") pod \"barbican-worker-67bdc7dbc-8r6xf\" (UID: \"9670574b-79d6-495e-abc8-123bf1582742\") " pod="openstack/barbican-worker-67bdc7dbc-8r6xf" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.416320 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9670574b-79d6-495e-abc8-123bf1582742-config-data-custom\") pod \"barbican-worker-67bdc7dbc-8r6xf\" (UID: \"9670574b-79d6-495e-abc8-123bf1582742\") " pod="openstack/barbican-worker-67bdc7dbc-8r6xf" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.416336 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7002d387-4756-4e68-b238-6e9cbf1d9b10-logs\") pod \"barbican-keystone-listener-57cfd5878-qwpfg\" (UID: \"7002d387-4756-4e68-b238-6e9cbf1d9b10\") " pod="openstack/barbican-keystone-listener-57cfd5878-qwpfg" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.416419 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7002d387-4756-4e68-b238-6e9cbf1d9b10-config-data\") pod \"barbican-keystone-listener-57cfd5878-qwpfg\" (UID: \"7002d387-4756-4e68-b238-6e9cbf1d9b10\") " pod="openstack/barbican-keystone-listener-57cfd5878-qwpfg" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.419687 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9670574b-79d6-495e-abc8-123bf1582742-logs\") pod 
\"barbican-worker-67bdc7dbc-8r6xf\" (UID: \"9670574b-79d6-495e-abc8-123bf1582742\") " pod="openstack/barbican-worker-67bdc7dbc-8r6xf" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.420012 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7002d387-4756-4e68-b238-6e9cbf1d9b10-logs\") pod \"barbican-keystone-listener-57cfd5878-qwpfg\" (UID: \"7002d387-4756-4e68-b238-6e9cbf1d9b10\") " pod="openstack/barbican-keystone-listener-57cfd5878-qwpfg" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.426799 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9670574b-79d6-495e-abc8-123bf1582742-config-data-custom\") pod \"barbican-worker-67bdc7dbc-8r6xf\" (UID: \"9670574b-79d6-495e-abc8-123bf1582742\") " pod="openstack/barbican-worker-67bdc7dbc-8r6xf" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.428032 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7002d387-4756-4e68-b238-6e9cbf1d9b10-combined-ca-bundle\") pod \"barbican-keystone-listener-57cfd5878-qwpfg\" (UID: \"7002d387-4756-4e68-b238-6e9cbf1d9b10\") " pod="openstack/barbican-keystone-listener-57cfd5878-qwpfg" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.430997 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9670574b-79d6-495e-abc8-123bf1582742-config-data\") pod \"barbican-worker-67bdc7dbc-8r6xf\" (UID: \"9670574b-79d6-495e-abc8-123bf1582742\") " pod="openstack/barbican-worker-67bdc7dbc-8r6xf" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.432145 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9670574b-79d6-495e-abc8-123bf1582742-combined-ca-bundle\") pod \"barbican-worker-67bdc7dbc-8r6xf\" (UID: \"9670574b-79d6-495e-abc8-123bf1582742\") " pod="openstack/barbican-worker-67bdc7dbc-8r6xf" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.434511 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7002d387-4756-4e68-b238-6e9cbf1d9b10-config-data-custom\") pod \"barbican-keystone-listener-57cfd5878-qwpfg\" (UID: \"7002d387-4756-4e68-b238-6e9cbf1d9b10\") " pod="openstack/barbican-keystone-listener-57cfd5878-qwpfg" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.451737 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7002d387-4756-4e68-b238-6e9cbf1d9b10-config-data\") pod \"barbican-keystone-listener-57cfd5878-qwpfg\" (UID: \"7002d387-4756-4e68-b238-6e9cbf1d9b10\") " pod="openstack/barbican-keystone-listener-57cfd5878-qwpfg" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.462134 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-2sl8l"] Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.464125 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.467692 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kq2vp\" (UniqueName: \"kubernetes.io/projected/7002d387-4756-4e68-b238-6e9cbf1d9b10-kube-api-access-kq2vp\") pod \"barbican-keystone-listener-57cfd5878-qwpfg\" (UID: \"7002d387-4756-4e68-b238-6e9cbf1d9b10\") " pod="openstack/barbican-keystone-listener-57cfd5878-qwpfg" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.477036 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmctm\" (UniqueName: \"kubernetes.io/projected/9670574b-79d6-495e-abc8-123bf1582742-kube-api-access-jmctm\") pod \"barbican-worker-67bdc7dbc-8r6xf\" (UID: \"9670574b-79d6-495e-abc8-123bf1582742\") " pod="openstack/barbican-worker-67bdc7dbc-8r6xf" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.484566 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-2sl8l"] Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.566772 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-67bdc7dbc-8r6xf" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.569504 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-57c5d544f8-4d7j9"] Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.572595 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-57c5d544f8-4d7j9" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.577074 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-57cfd5878-qwpfg" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.582111 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.588942 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-57c5d544f8-4d7j9"] Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.620944 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-config\") pod \"dnsmasq-dns-688c87cc99-2sl8l\" (UID: \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\") " pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.621042 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-2sl8l\" (UID: \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\") " pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.621084 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ddjx\" (UniqueName: \"kubernetes.io/projected/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-kube-api-access-7ddjx\") pod \"dnsmasq-dns-688c87cc99-2sl8l\" (UID: \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\") " pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.621140 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-dns-svc\") pod \"dnsmasq-dns-688c87cc99-2sl8l\" (UID: \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\") " pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.621158 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-2sl8l\" (UID: \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\") " pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.621189 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-2sl8l\" (UID: \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\") " pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.722541 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-config\") pod \"dnsmasq-dns-688c87cc99-2sl8l\" (UID: \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\") " pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.722622 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c6906a25-b26e-4510-aa75-2227ff1f3e1a-config-data-custom\") pod \"barbican-api-57c5d544f8-4d7j9\" (UID: \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\") " pod="openstack/barbican-api-57c5d544f8-4d7j9" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.722673 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-2sl8l\" (UID: \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\") " pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.722707 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6906a25-b26e-4510-aa75-2227ff1f3e1a-logs\") pod \"barbican-api-57c5d544f8-4d7j9\" (UID: \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\") " pod="openstack/barbican-api-57c5d544f8-4d7j9" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.722747 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ddjx\" (UniqueName: \"kubernetes.io/projected/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-kube-api-access-7ddjx\") pod \"dnsmasq-dns-688c87cc99-2sl8l\" (UID: \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\") " pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.722801 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tms8x\" (UniqueName: \"kubernetes.io/projected/c6906a25-b26e-4510-aa75-2227ff1f3e1a-kube-api-access-tms8x\") pod \"barbican-api-57c5d544f8-4d7j9\" (UID: \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\") " pod="openstack/barbican-api-57c5d544f8-4d7j9" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.722839 4840 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6906a25-b26e-4510-aa75-2227ff1f3e1a-config-data\") pod \"barbican-api-57c5d544f8-4d7j9\" (UID: \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\") " pod="openstack/barbican-api-57c5d544f8-4d7j9" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.723012 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-dns-svc\") pod \"dnsmasq-dns-688c87cc99-2sl8l\" (UID: \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\") " pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.723041 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-2sl8l\" (UID: \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\") " pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.723064 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6906a25-b26e-4510-aa75-2227ff1f3e1a-combined-ca-bundle\") pod \"barbican-api-57c5d544f8-4d7j9\" (UID: \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\") " pod="openstack/barbican-api-57c5d544f8-4d7j9" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.723119 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-2sl8l\" (UID: \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\") " pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.724357 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-2sl8l\" (UID: \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\") " pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.726923 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-2sl8l\" (UID: \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\") " pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.729388 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-dns-svc\") pod \"dnsmasq-dns-688c87cc99-2sl8l\" (UID: \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\") " pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.730206 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-2sl8l\" (UID: \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\") " pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.732002 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-config\") pod \"dnsmasq-dns-688c87cc99-2sl8l\" (UID: \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\") " pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.749304 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ddjx\" (UniqueName: \"kubernetes.io/projected/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-kube-api-access-7ddjx\") pod \"dnsmasq-dns-688c87cc99-2sl8l\" (UID: \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\") " pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.825786 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tms8x\" (UniqueName: \"kubernetes.io/projected/c6906a25-b26e-4510-aa75-2227ff1f3e1a-kube-api-access-tms8x\") pod \"barbican-api-57c5d544f8-4d7j9\" (UID: \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\") " pod="openstack/barbican-api-57c5d544f8-4d7j9" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.825934 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6906a25-b26e-4510-aa75-2227ff1f3e1a-config-data\") pod \"barbican-api-57c5d544f8-4d7j9\" (UID: \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\") " pod="openstack/barbican-api-57c5d544f8-4d7j9" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.826083 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6906a25-b26e-4510-aa75-2227ff1f3e1a-combined-ca-bundle\") pod \"barbican-api-57c5d544f8-4d7j9\" (UID: \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\") " pod="openstack/barbican-api-57c5d544f8-4d7j9" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.826252 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c6906a25-b26e-4510-aa75-2227ff1f3e1a-config-data-custom\") pod \"barbican-api-57c5d544f8-4d7j9\" (UID: \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\") " pod="openstack/barbican-api-57c5d544f8-4d7j9" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.826334 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6906a25-b26e-4510-aa75-2227ff1f3e1a-logs\") pod \"barbican-api-57c5d544f8-4d7j9\" (UID: \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\") " pod="openstack/barbican-api-57c5d544f8-4d7j9" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.827258 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6906a25-b26e-4510-aa75-2227ff1f3e1a-logs\") pod \"barbican-api-57c5d544f8-4d7j9\" (UID: \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\") " pod="openstack/barbican-api-57c5d544f8-4d7j9" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.854748 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6906a25-b26e-4510-aa75-2227ff1f3e1a-config-data\") pod \"barbican-api-57c5d544f8-4d7j9\" (UID: \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\") " pod="openstack/barbican-api-57c5d544f8-4d7j9" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.855439 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.859631 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c6906a25-b26e-4510-aa75-2227ff1f3e1a-config-data-custom\") pod \"barbican-api-57c5d544f8-4d7j9\" (UID: \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\") " pod="openstack/barbican-api-57c5d544f8-4d7j9" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.866841 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tms8x\" (UniqueName: \"kubernetes.io/projected/c6906a25-b26e-4510-aa75-2227ff1f3e1a-kube-api-access-tms8x\") pod \"barbican-api-57c5d544f8-4d7j9\" (UID: \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\") " pod="openstack/barbican-api-57c5d544f8-4d7j9" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.893195 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6906a25-b26e-4510-aa75-2227ff1f3e1a-combined-ca-bundle\") pod \"barbican-api-57c5d544f8-4d7j9\" (UID: \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\") " pod="openstack/barbican-api-57c5d544f8-4d7j9" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.922644 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-57cfd5878-qwpfg"] Dec 05 15:18:37 crc kubenswrapper[4840]: W1205 15:18:37.928204 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7002d387_4756_4e68_b238_6e9cbf1d9b10.slice/crio-f329edf4cab28ac598249812577830696c0ea1dce63b7801ad3edde6bd4b81e6 WatchSource:0}: Error finding container f329edf4cab28ac598249812577830696c0ea1dce63b7801ad3edde6bd4b81e6: Status 404 returned error can't find the container with id f329edf4cab28ac598249812577830696c0ea1dce63b7801ad3edde6bd4b81e6 Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.945848 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-57c5d544f8-4d7j9" Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.975987 4840 generic.go:334] "Generic (PLEG): container finished" podID="7a351148-31b1-402a-a8f5-0f26f81fddef" containerID="4962356fe991ffa318ba7d812198dd9cfb24ee68301defa6743104c44f2c4238" exitCode=0 Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.976024 4840 generic.go:334] "Generic (PLEG): container finished" podID="7a351148-31b1-402a-a8f5-0f26f81fddef" containerID="adfee7236fefc58166b18948e7fb8930431957f89c8f13358d96962c6f1a4fde" exitCode=2 Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.976073 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a351148-31b1-402a-a8f5-0f26f81fddef","Type":"ContainerDied","Data":"4962356fe991ffa318ba7d812198dd9cfb24ee68301defa6743104c44f2c4238"} Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.976105 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a351148-31b1-402a-a8f5-0f26f81fddef","Type":"ContainerDied","Data":"adfee7236fefc58166b18948e7fb8930431957f89c8f13358d96962c6f1a4fde"} Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.977889 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-57cfd5878-qwpfg" event={"ID":"7002d387-4756-4e68-b238-6e9cbf1d9b10","Type":"ContainerStarted","Data":"f329edf4cab28ac598249812577830696c0ea1dce63b7801ad3edde6bd4b81e6"} Dec 05 15:18:37 crc kubenswrapper[4840]: I1205 15:18:37.982369 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-67bdc7dbc-8r6xf"] Dec 05 15:18:37 crc kubenswrapper[4840]: W1205 15:18:37.995542 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9670574b_79d6_495e_abc8_123bf1582742.slice/crio-43a9cee4e11fbd5c1b06898293a155b3c4e27b6a63aaeef2b723a0c4aa1686fc WatchSource:0}: Error finding container 43a9cee4e11fbd5c1b06898293a155b3c4e27b6a63aaeef2b723a0c4aa1686fc: Status 404 returned error can't find the container with id 43a9cee4e11fbd5c1b06898293a155b3c4e27b6a63aaeef2b723a0c4aa1686fc Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.311569 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-szwv8" Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.444559 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae68e2b9-f51d-4486-952d-73c097fbaac4-scripts\") pod \"ae68e2b9-f51d-4486-952d-73c097fbaac4\" (UID: \"ae68e2b9-f51d-4486-952d-73c097fbaac4\") " Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.444823 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xq62m\" (UniqueName: \"kubernetes.io/projected/ae68e2b9-f51d-4486-952d-73c097fbaac4-kube-api-access-xq62m\") pod \"ae68e2b9-f51d-4486-952d-73c097fbaac4\" (UID: \"ae68e2b9-f51d-4486-952d-73c097fbaac4\") " Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.444966 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae68e2b9-f51d-4486-952d-73c097fbaac4-combined-ca-bundle\") pod \"ae68e2b9-f51d-4486-952d-73c097fbaac4\" (UID: \"ae68e2b9-f51d-4486-952d-73c097fbaac4\") " Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.445165 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae68e2b9-f51d-4486-952d-73c097fbaac4-config-data\") pod \"ae68e2b9-f51d-4486-952d-73c097fbaac4\" (UID: \"ae68e2b9-f51d-4486-952d-73c097fbaac4\") " Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.445194 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ae68e2b9-f51d-4486-952d-73c097fbaac4-db-sync-config-data\") pod \"ae68e2b9-f51d-4486-952d-73c097fbaac4\" (UID: \"ae68e2b9-f51d-4486-952d-73c097fbaac4\") " Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.445402 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ae68e2b9-f51d-4486-952d-73c097fbaac4-etc-machine-id\") pod \"ae68e2b9-f51d-4486-952d-73c097fbaac4\" (UID: \"ae68e2b9-f51d-4486-952d-73c097fbaac4\") " Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.446320 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ae68e2b9-f51d-4486-952d-73c097fbaac4-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ae68e2b9-f51d-4486-952d-73c097fbaac4" (UID: "ae68e2b9-f51d-4486-952d-73c097fbaac4"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.446820 4840 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ae68e2b9-f51d-4486-952d-73c097fbaac4-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.453381 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae68e2b9-f51d-4486-952d-73c097fbaac4-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "ae68e2b9-f51d-4486-952d-73c097fbaac4" (UID: "ae68e2b9-f51d-4486-952d-73c097fbaac4"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.453435 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae68e2b9-f51d-4486-952d-73c097fbaac4-kube-api-access-xq62m" (OuterVolumeSpecName: "kube-api-access-xq62m") pod "ae68e2b9-f51d-4486-952d-73c097fbaac4" (UID: "ae68e2b9-f51d-4486-952d-73c097fbaac4"). InnerVolumeSpecName "kube-api-access-xq62m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.453402 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae68e2b9-f51d-4486-952d-73c097fbaac4-scripts" (OuterVolumeSpecName: "scripts") pod "ae68e2b9-f51d-4486-952d-73c097fbaac4" (UID: "ae68e2b9-f51d-4486-952d-73c097fbaac4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.476630 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae68e2b9-f51d-4486-952d-73c097fbaac4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ae68e2b9-f51d-4486-952d-73c097fbaac4" (UID: "ae68e2b9-f51d-4486-952d-73c097fbaac4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.482405 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-2sl8l"] Dec 05 15:18:38 crc kubenswrapper[4840]: W1205 15:18:38.483408 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a917f9e_3ca3_49c8_9e7f_f6b7b2835961.slice/crio-0a82acdaf4311493bde615f18b5a8d7451eab5a9277985977c6a4df91e7e1096 WatchSource:0}: Error finding container 0a82acdaf4311493bde615f18b5a8d7451eab5a9277985977c6a4df91e7e1096: Status 404 returned error can't find the container with id 0a82acdaf4311493bde615f18b5a8d7451eab5a9277985977c6a4df91e7e1096 Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.513049 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae68e2b9-f51d-4486-952d-73c097fbaac4-config-data" (OuterVolumeSpecName: "config-data") pod "ae68e2b9-f51d-4486-952d-73c097fbaac4" (UID: "ae68e2b9-f51d-4486-952d-73c097fbaac4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.606517 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae68e2b9-f51d-4486-952d-73c097fbaac4-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.606554 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xq62m\" (UniqueName: \"kubernetes.io/projected/ae68e2b9-f51d-4486-952d-73c097fbaac4-kube-api-access-xq62m\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.606572 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae68e2b9-f51d-4486-952d-73c097fbaac4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.606584 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae68e2b9-f51d-4486-952d-73c097fbaac4-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.606595 4840 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ae68e2b9-f51d-4486-952d-73c097fbaac4-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.646209 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-57c5d544f8-4d7j9"] Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.989255 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" event={"ID":"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961","Type":"ContainerStarted","Data":"0a82acdaf4311493bde615f18b5a8d7451eab5a9277985977c6a4df91e7e1096"} Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.990364 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-67bdc7dbc-8r6xf" event={"ID":"9670574b-79d6-495e-abc8-123bf1582742","Type":"ContainerStarted","Data":"43a9cee4e11fbd5c1b06898293a155b3c4e27b6a63aaeef2b723a0c4aa1686fc"} Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.994456 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-szwv8" event={"ID":"ae68e2b9-f51d-4486-952d-73c097fbaac4","Type":"ContainerDied","Data":"b0df462856c5e1122ba18273113d0b7e4c0338a486a192a363945f18f2c26289"} Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.994485 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b0df462856c5e1122ba18273113d0b7e4c0338a486a192a363945f18f2c26289" Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.994521 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-szwv8" Dec 05 15:18:38 crc kubenswrapper[4840]: I1205 15:18:38.995765 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-57c5d544f8-4d7j9" event={"ID":"c6906a25-b26e-4510-aa75-2227ff1f3e1a","Type":"ContainerStarted","Data":"5cc55b0dd5245dadd032529eb173e9ebe2e0bebc3ec59cb7a42b9488ae017015"} Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.337691 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 15:18:39 crc kubenswrapper[4840]: E1205 15:18:39.338689 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae68e2b9-f51d-4486-952d-73c097fbaac4" containerName="cinder-db-sync" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.338705 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae68e2b9-f51d-4486-952d-73c097fbaac4" containerName="cinder-db-sync" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.338953 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae68e2b9-f51d-4486-952d-73c097fbaac4" containerName="cinder-db-sync" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.340134 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.343076 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.343493 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.343682 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-n52t2" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.343829 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.345052 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.425118 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5j58k\" (UniqueName: \"kubernetes.io/projected/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-kube-api-access-5j58k\") pod \"cinder-scheduler-0\" (UID: \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.425248 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-scripts\") pod \"cinder-scheduler-0\" (UID: \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.425382 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-config-data\") pod \"cinder-scheduler-0\" (UID: \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.425448 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.425518 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.425666 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.453814 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-2sl8l"] Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.519757 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-6ccth"] Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.521743 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.528881 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-6ccth\" (UID: \"4be94020-016c-4262-ade5-165fe35d6a2c\") " pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.528941 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.528971 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-config\") pod \"dnsmasq-dns-6bb4fc677f-6ccth\" (UID: \"4be94020-016c-4262-ade5-165fe35d6a2c\") " pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.529003 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5j58k\" (UniqueName: \"kubernetes.io/projected/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-kube-api-access-5j58k\") pod \"cinder-scheduler-0\" (UID: \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.529021 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-scripts\") pod \"cinder-scheduler-0\" (UID: \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.529058 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-6ccth\" (UID: \"4be94020-016c-4262-ade5-165fe35d6a2c\") " pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.529079 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-6ccth\" (UID: \"4be94020-016c-4262-ade5-165fe35d6a2c\") " pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.529134 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2w5h\" (UniqueName: \"kubernetes.io/projected/4be94020-016c-4262-ade5-165fe35d6a2c-kube-api-access-q2w5h\") pod \"dnsmasq-dns-6bb4fc677f-6ccth\" (UID: \"4be94020-016c-4262-ade5-165fe35d6a2c\") " pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.529164 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-config-data\") pod \"cinder-scheduler-0\" (UID: \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.529192 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-6ccth\" (UID: \"4be94020-016c-4262-ade5-165fe35d6a2c\") " pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.529217 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.529247 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.529325 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.534838 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-scripts\") pod \"cinder-scheduler-0\" (UID: \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.548062 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-combined-ca-bundle\") 
pod \"cinder-scheduler-0\" (UID: \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.548674 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.553624 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-6ccth"] Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.560898 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-config-data\") pod \"cinder-scheduler-0\" (UID: \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.568257 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5j58k\" (UniqueName: \"kubernetes.io/projected/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-kube-api-access-5j58k\") pod \"cinder-scheduler-0\" (UID: \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.600463 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.635507 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-6ccth\" (UID: \"4be94020-016c-4262-ade5-165fe35d6a2c\") " pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.643044 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-6ccth\" (UID: \"4be94020-016c-4262-ade5-165fe35d6a2c\") " pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.648149 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2w5h\" (UniqueName: \"kubernetes.io/projected/4be94020-016c-4262-ade5-165fe35d6a2c-kube-api-access-q2w5h\") pod \"dnsmasq-dns-6bb4fc677f-6ccth\" (UID: \"4be94020-016c-4262-ade5-165fe35d6a2c\") " pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.648252 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-6ccth\" (UID: \"4be94020-016c-4262-ade5-165fe35d6a2c\") " pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.656822 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-6ccth\" (UID: \"4be94020-016c-4262-ade5-165fe35d6a2c\") " pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 
15:18:39.642970 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-6ccth\" (UID: \"4be94020-016c-4262-ade5-165fe35d6a2c\") " pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.657461 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-6ccth\" (UID: \"4be94020-016c-4262-ade5-165fe35d6a2c\") " pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.657913 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-config\") pod \"dnsmasq-dns-6bb4fc677f-6ccth\" (UID: \"4be94020-016c-4262-ade5-165fe35d6a2c\") " pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.658270 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-6ccth\" (UID: \"4be94020-016c-4262-ade5-165fe35d6a2c\") " pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.658739 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-config\") pod \"dnsmasq-dns-6bb4fc677f-6ccth\" (UID: \"4be94020-016c-4262-ade5-165fe35d6a2c\") " pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.659124 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-6ccth\" (UID: \"4be94020-016c-4262-ade5-165fe35d6a2c\") " pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.698339 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2w5h\" (UniqueName: \"kubernetes.io/projected/4be94020-016c-4262-ade5-165fe35d6a2c-kube-api-access-q2w5h\") pod \"dnsmasq-dns-6bb4fc677f-6ccth\" (UID: \"4be94020-016c-4262-ade5-165fe35d6a2c\") " pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.778294 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.779908 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.783055 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.785442 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.863103 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58a6240b-dbe5-4208-b070-3b84e319505d-scripts\") pod \"cinder-api-0\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " pod="openstack/cinder-api-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.863744 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/58a6240b-dbe5-4208-b070-3b84e319505d-config-data-custom\") pod \"cinder-api-0\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " pod="openstack/cinder-api-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.863895 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58a6240b-dbe5-4208-b070-3b84e319505d-config-data\") pod \"cinder-api-0\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " pod="openstack/cinder-api-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.864070 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/58a6240b-dbe5-4208-b070-3b84e319505d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " pod="openstack/cinder-api-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.864184 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bll58\" (UniqueName: \"kubernetes.io/projected/58a6240b-dbe5-4208-b070-3b84e319505d-kube-api-access-bll58\") pod \"cinder-api-0\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " pod="openstack/cinder-api-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.864312 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58a6240b-dbe5-4208-b070-3b84e319505d-logs\") pod \"cinder-api-0\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " pod="openstack/cinder-api-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.864379 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58a6240b-dbe5-4208-b070-3b84e319505d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " pod="openstack/cinder-api-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.924839 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.966367 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58a6240b-dbe5-4208-b070-3b84e319505d-scripts\") pod \"cinder-api-0\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " pod="openstack/cinder-api-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.966436 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58a6240b-dbe5-4208-b070-3b84e319505d-config-data\") pod \"cinder-api-0\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " pod="openstack/cinder-api-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.966467 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/58a6240b-dbe5-4208-b070-3b84e319505d-config-data-custom\") pod \"cinder-api-0\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " pod="openstack/cinder-api-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.966514 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/58a6240b-dbe5-4208-b070-3b84e319505d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " pod="openstack/cinder-api-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.966563 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bll58\" (UniqueName: \"kubernetes.io/projected/58a6240b-dbe5-4208-b070-3b84e319505d-kube-api-access-bll58\") pod \"cinder-api-0\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " pod="openstack/cinder-api-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.966647 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58a6240b-dbe5-4208-b070-3b84e319505d-logs\") pod \"cinder-api-0\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " pod="openstack/cinder-api-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.966669 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58a6240b-dbe5-4208-b070-3b84e319505d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " pod="openstack/cinder-api-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.967332 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/58a6240b-dbe5-4208-b070-3b84e319505d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " pod="openstack/cinder-api-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.968205 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58a6240b-dbe5-4208-b070-3b84e319505d-logs\") pod \"cinder-api-0\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " pod="openstack/cinder-api-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.975405 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/58a6240b-dbe5-4208-b070-3b84e319505d-config-data-custom\") pod \"cinder-api-0\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " 
pod="openstack/cinder-api-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.976123 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58a6240b-dbe5-4208-b070-3b84e319505d-scripts\") pod \"cinder-api-0\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " pod="openstack/cinder-api-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.976648 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58a6240b-dbe5-4208-b070-3b84e319505d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " pod="openstack/cinder-api-0" Dec 05 15:18:39 crc kubenswrapper[4840]: I1205 15:18:39.984553 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58a6240b-dbe5-4208-b070-3b84e319505d-config-data\") pod \"cinder-api-0\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " pod="openstack/cinder-api-0" Dec 05 15:18:40 crc kubenswrapper[4840]: I1205 15:18:40.022542 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bll58\" (UniqueName: \"kubernetes.io/projected/58a6240b-dbe5-4208-b070-3b84e319505d-kube-api-access-bll58\") pod \"cinder-api-0\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " pod="openstack/cinder-api-0" Dec 05 15:18:40 crc kubenswrapper[4840]: I1205 15:18:40.028031 4840 generic.go:334] "Generic (PLEG): container finished" podID="1a917f9e-3ca3-49c8-9e7f-f6b7b2835961" containerID="b062243dd82dff14694d1d4b9b714a500a4fdd56aa6d295152e2dd7f7d57ecac" exitCode=0 Dec 05 15:18:40 crc kubenswrapper[4840]: I1205 15:18:40.028180 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" event={"ID":"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961","Type":"ContainerDied","Data":"b062243dd82dff14694d1d4b9b714a500a4fdd56aa6d295152e2dd7f7d57ecac"} Dec 05 15:18:40 crc kubenswrapper[4840]: I1205 15:18:40.058525 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-57c5d544f8-4d7j9" event={"ID":"c6906a25-b26e-4510-aa75-2227ff1f3e1a","Type":"ContainerStarted","Data":"8f533137e6f3ea4a1bc5cd613d88aa031669a40b5da29f855a7f3e2d91ba83b9"} Dec 05 15:18:40 crc kubenswrapper[4840]: I1205 15:18:40.058577 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-57c5d544f8-4d7j9" event={"ID":"c6906a25-b26e-4510-aa75-2227ff1f3e1a","Type":"ContainerStarted","Data":"268aff6bade259ee1c5aacf59eeb6704a0ea0e317e2f3e0232ffb57fc7880f9f"} Dec 05 15:18:40 crc kubenswrapper[4840]: I1205 15:18:40.060833 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-57c5d544f8-4d7j9" Dec 05 15:18:40 crc kubenswrapper[4840]: I1205 15:18:40.060889 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-57c5d544f8-4d7j9" Dec 05 15:18:40 crc kubenswrapper[4840]: I1205 15:18:40.122427 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-57c5d544f8-4d7j9" podStartSLOduration=3.122402841 podStartE2EDuration="3.122402841s" podCreationTimestamp="2025-12-05 15:18:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:18:40.09165454 +0000 UTC m=+1198.432717154" watchObservedRunningTime="2025-12-05 15:18:40.122402841 +0000 UTC m=+1198.463465455" Dec 05 
15:18:40 crc kubenswrapper[4840]: I1205 15:18:40.138055 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 05 15:18:40 crc kubenswrapper[4840]: I1205 15:18:40.162289 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 05 15:18:40 crc kubenswrapper[4840]: W1205 15:18:40.181186 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf1632c9a_90cf_49f9_9cbe_96b1b1b53e7c.slice/crio-92d4245cba7871671c5e58eed3b86235154fa8947917a8fe6f8545d600ef5fd9 WatchSource:0}: Error finding container 92d4245cba7871671c5e58eed3b86235154fa8947917a8fe6f8545d600ef5fd9: Status 404 returned error can't find the container with id 92d4245cba7871671c5e58eed3b86235154fa8947917a8fe6f8545d600ef5fd9
Dec 05 15:18:40 crc kubenswrapper[4840]: E1205 15:18:40.387269 4840 log.go:32] "CreateContainer in sandbox from runtime service failed" err=<
Dec 05 15:18:40 crc kubenswrapper[4840]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory
Dec 05 15:18:40 crc kubenswrapper[4840]: > podSandboxID="0a82acdaf4311493bde615f18b5a8d7451eab5a9277985977c6a4df91e7e1096"
Dec 05 15:18:40 crc kubenswrapper[4840]: E1205 15:18:40.388038 4840 kuberuntime_manager.go:1274] "Unhandled Error" err=<
Dec 05 15:18:40 crc kubenswrapper[4840]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n567h69h57fh656h5dh545h7h595h5bhb9h5cbh548h575h68ch689h5cdhd6h5c6hd6h5c6h546h9h657h688h56h5cch9bh585h56fh64ch587h647q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-swift-storage-0,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-swift-storage-0,SubPath:dns-swift-storage-0,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-nb,SubPath:ovsdbserver-nb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-sb,SubPath:ovsdbserver-sb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7ddjx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-688c87cc99-2sl8l_openstack(1a917f9e-3ca3-49c8-9e7f-f6b7b2835961): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory
Dec 05 15:18:40 crc kubenswrapper[4840]: > logger="UnhandledError"
Dec 05 15:18:40 crc kubenswrapper[4840]: E1205 15:18:40.393123 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" podUID="1a917f9e-3ca3-49c8-9e7f-f6b7b2835961"
Dec 05 15:18:40 crc kubenswrapper[4840]: I1205 15:18:40.498904 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-6ccth"]
Dec 05 15:18:40 crc kubenswrapper[4840]: I1205 15:18:40.763779 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Dec 05 15:18:41 crc kubenswrapper[4840]: W1205 15:18:41.071690 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod58a6240b_dbe5_4208_b070_3b84e319505d.slice/crio-1ea439f28076dfee011a418a573809ac4dd164a81212247235ad85d05557fccc WatchSource:0}: Error finding container 1ea439f28076dfee011a418a573809ac4dd164a81212247235ad85d05557fccc: Status 404 returned error can't find the container with id 1ea439f28076dfee011a418a573809ac4dd164a81212247235ad85d05557fccc
Dec 05 15:18:41 crc kubenswrapper[4840]: I1205 15:18:41.074628 4840 generic.go:334] "Generic (PLEG): container finished" podID="4be94020-016c-4262-ade5-165fe35d6a2c" containerID="6abaca6d1e9694835ca66f3efac358460688024cd305583826a05c14fe064105" exitCode=0
Dec 05 15:18:41 crc kubenswrapper[4840]: I1205 15:18:41.074693 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" event={"ID":"4be94020-016c-4262-ade5-165fe35d6a2c","Type":"ContainerDied","Data":"6abaca6d1e9694835ca66f3efac358460688024cd305583826a05c14fe064105"}
Dec 05 15:18:41 crc kubenswrapper[4840]: I1205 15:18:41.074719 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" event={"ID":"4be94020-016c-4262-ade5-165fe35d6a2c","Type":"ContainerStarted","Data":"61ecb5ae14c60ae1487f9cfd8112d482bb378dbd8fef707bea915aab6429a085"}
Dec 05 15:18:41 crc kubenswrapper[4840]: I1205 15:18:41.078436 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c","Type":"ContainerStarted","Data":"92d4245cba7871671c5e58eed3b86235154fa8947917a8fe6f8545d600ef5fd9"}
Dec 05 15:18:41 crc kubenswrapper[4840]: I1205 15:18:41.822796 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" Dec 05 15:18:41 crc kubenswrapper[4840]: I1205 15:18:41.908038 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ddjx\" (UniqueName: \"kubernetes.io/projected/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-kube-api-access-7ddjx\") pod \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\" (UID: \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\") " Dec 05 15:18:41 crc kubenswrapper[4840]: I1205 15:18:41.908285 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-ovsdbserver-nb\") pod \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\" (UID: \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\") " Dec 05 15:18:41 crc kubenswrapper[4840]: I1205 15:18:41.908377 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-ovsdbserver-sb\") pod \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\" (UID: \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\") " Dec 05 15:18:41 crc kubenswrapper[4840]: I1205 15:18:41.908433 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-config\") pod \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\" (UID: \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\") " Dec 05 15:18:41 crc kubenswrapper[4840]: I1205 15:18:41.908473 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-dns-svc\") pod \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\" (UID: \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\") " Dec 05 15:18:41 crc kubenswrapper[4840]: I1205 15:18:41.908502 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-dns-swift-storage-0\") pod \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\" (UID: \"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961\") " Dec 05 15:18:41 crc kubenswrapper[4840]: I1205 15:18:41.922075 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-kube-api-access-7ddjx" (OuterVolumeSpecName: "kube-api-access-7ddjx") pod "1a917f9e-3ca3-49c8-9e7f-f6b7b2835961" (UID: "1a917f9e-3ca3-49c8-9e7f-f6b7b2835961"). InnerVolumeSpecName "kube-api-access-7ddjx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:18:41 crc kubenswrapper[4840]: I1205 15:18:41.991441 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1a917f9e-3ca3-49c8-9e7f-f6b7b2835961" (UID: "1a917f9e-3ca3-49c8-9e7f-f6b7b2835961"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.011376 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ddjx\" (UniqueName: \"kubernetes.io/projected/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-kube-api-access-7ddjx\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.011404 4840 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.015659 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1a917f9e-3ca3-49c8-9e7f-f6b7b2835961" (UID: "1a917f9e-3ca3-49c8-9e7f-f6b7b2835961"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.019529 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1a917f9e-3ca3-49c8-9e7f-f6b7b2835961" (UID: "1a917f9e-3ca3-49c8-9e7f-f6b7b2835961"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.020741 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-config" (OuterVolumeSpecName: "config") pod "1a917f9e-3ca3-49c8-9e7f-f6b7b2835961" (UID: "1a917f9e-3ca3-49c8-9e7f-f6b7b2835961"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.042241 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "1a917f9e-3ca3-49c8-9e7f-f6b7b2835961" (UID: "1a917f9e-3ca3-49c8-9e7f-f6b7b2835961"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.135010 4840 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.135112 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-config\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.135130 4840 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.135153 4840 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.166344 4840 generic.go:334] "Generic (PLEG): container finished" podID="7a351148-31b1-402a-a8f5-0f26f81fddef" containerID="959c31b1df96cbf3954e3394e2e3259bbf0a197a97f5e49d032d347c25242994" exitCode=0 Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.166411 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a351148-31b1-402a-a8f5-0f26f81fddef","Type":"ContainerDied","Data":"959c31b1df96cbf3954e3394e2e3259bbf0a197a97f5e49d032d347c25242994"} Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.166438 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7a351148-31b1-402a-a8f5-0f26f81fddef","Type":"ContainerDied","Data":"7e5759ba821fbc43654734a2d29a8808ba206f616abf3e4e535d13d4ee4578bb"} Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.166450 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7e5759ba821fbc43654734a2d29a8808ba206f616abf3e4e535d13d4ee4578bb" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.183679 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" event={"ID":"1a917f9e-3ca3-49c8-9e7f-f6b7b2835961","Type":"ContainerDied","Data":"0a82acdaf4311493bde615f18b5a8d7451eab5a9277985977c6a4df91e7e1096"} Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.183739 4840 scope.go:117] "RemoveContainer" containerID="b062243dd82dff14694d1d4b9b714a500a4fdd56aa6d295152e2dd7f7d57ecac" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.183921 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-2sl8l" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.190159 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"58a6240b-dbe5-4208-b070-3b84e319505d","Type":"ContainerStarted","Data":"1ea439f28076dfee011a418a573809ac4dd164a81212247235ad85d05557fccc"} Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.332437 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.462804 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-67c65cc6bd-s49k5" podUID="15d9aa0d-7948-40b8-a043-e6d36f2a5bb1" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.149:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.149:8443: connect: connection refused" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.463784 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7a351148-31b1-402a-a8f5-0f26f81fddef-sg-core-conf-yaml\") pod \"7a351148-31b1-402a-a8f5-0f26f81fddef\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.463847 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c82pl\" (UniqueName: \"kubernetes.io/projected/7a351148-31b1-402a-a8f5-0f26f81fddef-kube-api-access-c82pl\") pod \"7a351148-31b1-402a-a8f5-0f26f81fddef\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.463955 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a351148-31b1-402a-a8f5-0f26f81fddef-log-httpd\") pod \"7a351148-31b1-402a-a8f5-0f26f81fddef\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.464011 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a351148-31b1-402a-a8f5-0f26f81fddef-combined-ca-bundle\") pod \"7a351148-31b1-402a-a8f5-0f26f81fddef\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.464128 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a351148-31b1-402a-a8f5-0f26f81fddef-run-httpd\") pod \"7a351148-31b1-402a-a8f5-0f26f81fddef\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.464214 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a351148-31b1-402a-a8f5-0f26f81fddef-scripts\") pod \"7a351148-31b1-402a-a8f5-0f26f81fddef\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.464292 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a351148-31b1-402a-a8f5-0f26f81fddef-config-data\") pod \"7a351148-31b1-402a-a8f5-0f26f81fddef\" (UID: \"7a351148-31b1-402a-a8f5-0f26f81fddef\") " Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.464820 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a351148-31b1-402a-a8f5-0f26f81fddef-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7a351148-31b1-402a-a8f5-0f26f81fddef" (UID: "7a351148-31b1-402a-a8f5-0f26f81fddef"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.465130 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a351148-31b1-402a-a8f5-0f26f81fddef-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7a351148-31b1-402a-a8f5-0f26f81fddef" (UID: "7a351148-31b1-402a-a8f5-0f26f81fddef"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.481011 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a351148-31b1-402a-a8f5-0f26f81fddef-scripts" (OuterVolumeSpecName: "scripts") pod "7a351148-31b1-402a-a8f5-0f26f81fddef" (UID: "7a351148-31b1-402a-a8f5-0f26f81fddef"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.487133 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a351148-31b1-402a-a8f5-0f26f81fddef-kube-api-access-c82pl" (OuterVolumeSpecName: "kube-api-access-c82pl") pod "7a351148-31b1-402a-a8f5-0f26f81fddef" (UID: "7a351148-31b1-402a-a8f5-0f26f81fddef"). InnerVolumeSpecName "kube-api-access-c82pl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.540102 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7b99464548-lx7k9" podUID="d384faa1-3b3b-45f8-bf4b-902236ec40da" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.558911 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-2sl8l"] Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.567817 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-2sl8l"] Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.571099 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c82pl\" (UniqueName: \"kubernetes.io/projected/7a351148-31b1-402a-a8f5-0f26f81fddef-kube-api-access-c82pl\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.571149 4840 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a351148-31b1-402a-a8f5-0f26f81fddef-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.571162 4840 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7a351148-31b1-402a-a8f5-0f26f81fddef-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.571172 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a351148-31b1-402a-a8f5-0f26f81fddef-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.718282 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a351148-31b1-402a-a8f5-0f26f81fddef-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7a351148-31b1-402a-a8f5-0f26f81fddef" (UID: "7a351148-31b1-402a-a8f5-0f26f81fddef"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.748040 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a351148-31b1-402a-a8f5-0f26f81fddef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7a351148-31b1-402a-a8f5-0f26f81fddef" (UID: "7a351148-31b1-402a-a8f5-0f26f81fddef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.774674 4840 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7a351148-31b1-402a-a8f5-0f26f81fddef-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.775076 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a351148-31b1-402a-a8f5-0f26f81fddef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.798511 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a351148-31b1-402a-a8f5-0f26f81fddef-config-data" (OuterVolumeSpecName: "config-data") pod "7a351148-31b1-402a-a8f5-0f26f81fddef" (UID: "7a351148-31b1-402a-a8f5-0f26f81fddef"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:42 crc kubenswrapper[4840]: I1205 15:18:42.877201 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a351148-31b1-402a-a8f5-0f26f81fddef-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.201079 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c","Type":"ContainerStarted","Data":"e2a9ad7d895504f96a731d02835357b24b2817379d25516fbf12356b233dce70"} Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.203694 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-67bdc7dbc-8r6xf" event={"ID":"9670574b-79d6-495e-abc8-123bf1582742","Type":"ContainerStarted","Data":"7c473ab635e3b3082951b7c0a4b774834175ef1b8526966dabbdc0df8db11cd8"} Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.203753 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-67bdc7dbc-8r6xf" event={"ID":"9670574b-79d6-495e-abc8-123bf1582742","Type":"ContainerStarted","Data":"75abe4ba4d5a6751600845dab04d2fc739374cd87a8611cfe181faf28968f402"} Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.209515 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-57cfd5878-qwpfg" event={"ID":"7002d387-4756-4e68-b238-6e9cbf1d9b10","Type":"ContainerStarted","Data":"6c70aa6af6aa0be618ad3601395a119b0a11b7b4c19f40d77e2c1893a1f506c2"} Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.209655 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-57cfd5878-qwpfg" event={"ID":"7002d387-4756-4e68-b238-6e9cbf1d9b10","Type":"ContainerStarted","Data":"31048560b3a6294862e7d1bafb9fbf8aadab19952183b6fd99d7f8594cb53968"} Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.226607 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"58a6240b-dbe5-4208-b070-3b84e319505d","Type":"ContainerStarted","Data":"8099338a7fddec2ff27849f9d27343b9597493a5797e08f6d86400dbfa213a47"} Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.228019 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-67bdc7dbc-8r6xf" podStartSLOduration=2.37852607 podStartE2EDuration="6.227994639s" podCreationTimestamp="2025-12-05 15:18:37 +0000 UTC" firstStartedPulling="2025-12-05 15:18:38.004105676 +0000 UTC m=+1196.345168300" lastFinishedPulling="2025-12-05 15:18:41.853574255 +0000 UTC m=+1200.194636869" observedRunningTime="2025-12-05 15:18:43.218387547 +0000 UTC m=+1201.559450161" watchObservedRunningTime="2025-12-05 15:18:43.227994639 +0000 UTC m=+1201.569057253" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.230078 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.231513 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" event={"ID":"4be94020-016c-4262-ade5-165fe35d6a2c","Type":"ContainerStarted","Data":"7721b0eefb969a6dd2ee07d134959fd2c68f46365e33634e8d163ef93d8dc7a2"} Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.231561 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.247604 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-57cfd5878-qwpfg" podStartSLOduration=2.302715293 podStartE2EDuration="6.247577284s" podCreationTimestamp="2025-12-05 15:18:37 +0000 UTC" firstStartedPulling="2025-12-05 15:18:37.931336995 +0000 UTC m=+1196.272399609" lastFinishedPulling="2025-12-05 15:18:41.876198986 +0000 UTC m=+1200.217261600" observedRunningTime="2025-12-05 15:18:43.240015789 +0000 UTC m=+1201.581078403" watchObservedRunningTime="2025-12-05 15:18:43.247577284 +0000 UTC m=+1201.588639898" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.290583 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" podStartSLOduration=4.29055932 podStartE2EDuration="4.29055932s" podCreationTimestamp="2025-12-05 15:18:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:18:43.267720564 +0000 UTC m=+1201.608783178" watchObservedRunningTime="2025-12-05 15:18:43.29055932 +0000 UTC m=+1201.631621934" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.410771 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.422194 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.436173 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:18:43 crc kubenswrapper[4840]: E1205 15:18:43.436651 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a917f9e-3ca3-49c8-9e7f-f6b7b2835961" containerName="init" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.436673 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a917f9e-3ca3-49c8-9e7f-f6b7b2835961" containerName="init" Dec 05 15:18:43 crc kubenswrapper[4840]: E1205 15:18:43.436694 4840 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a351148-31b1-402a-a8f5-0f26f81fddef" containerName="proxy-httpd" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.436705 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a351148-31b1-402a-a8f5-0f26f81fddef" containerName="proxy-httpd" Dec 05 15:18:43 crc kubenswrapper[4840]: E1205 15:18:43.436724 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a351148-31b1-402a-a8f5-0f26f81fddef" containerName="ceilometer-notification-agent" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.436732 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a351148-31b1-402a-a8f5-0f26f81fddef" containerName="ceilometer-notification-agent" Dec 05 15:18:43 crc kubenswrapper[4840]: E1205 15:18:43.436753 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a351148-31b1-402a-a8f5-0f26f81fddef" containerName="sg-core" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.436761 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a351148-31b1-402a-a8f5-0f26f81fddef" containerName="sg-core" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.437060 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a351148-31b1-402a-a8f5-0f26f81fddef" containerName="ceilometer-notification-agent" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.437097 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a917f9e-3ca3-49c8-9e7f-f6b7b2835961" containerName="init" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.437108 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a351148-31b1-402a-a8f5-0f26f81fddef" containerName="proxy-httpd" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.437124 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a351148-31b1-402a-a8f5-0f26f81fddef" containerName="sg-core" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.438765 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.441327 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.441742 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.464190 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.600171 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43119c92-a88a-4cf5-8ed5-0fb88578a642-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " pod="openstack/ceilometer-0" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.600221 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/43119c92-a88a-4cf5-8ed5-0fb88578a642-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " pod="openstack/ceilometer-0" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.600236 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43119c92-a88a-4cf5-8ed5-0fb88578a642-config-data\") pod \"ceilometer-0\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " pod="openstack/ceilometer-0" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.600306 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/43119c92-a88a-4cf5-8ed5-0fb88578a642-log-httpd\") pod \"ceilometer-0\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " pod="openstack/ceilometer-0" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.600333 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43119c92-a88a-4cf5-8ed5-0fb88578a642-scripts\") pod \"ceilometer-0\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " pod="openstack/ceilometer-0" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.600363 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldxsw\" (UniqueName: \"kubernetes.io/projected/43119c92-a88a-4cf5-8ed5-0fb88578a642-kube-api-access-ldxsw\") pod \"ceilometer-0\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " pod="openstack/ceilometer-0" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.600400 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/43119c92-a88a-4cf5-8ed5-0fb88578a642-run-httpd\") pod \"ceilometer-0\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " pod="openstack/ceilometer-0" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.701660 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43119c92-a88a-4cf5-8ed5-0fb88578a642-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " pod="openstack/ceilometer-0" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 
15:18:43.701699 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/43119c92-a88a-4cf5-8ed5-0fb88578a642-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " pod="openstack/ceilometer-0" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.701732 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43119c92-a88a-4cf5-8ed5-0fb88578a642-config-data\") pod \"ceilometer-0\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " pod="openstack/ceilometer-0" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.701803 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/43119c92-a88a-4cf5-8ed5-0fb88578a642-log-httpd\") pod \"ceilometer-0\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " pod="openstack/ceilometer-0" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.701828 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43119c92-a88a-4cf5-8ed5-0fb88578a642-scripts\") pod \"ceilometer-0\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " pod="openstack/ceilometer-0" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.701858 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldxsw\" (UniqueName: \"kubernetes.io/projected/43119c92-a88a-4cf5-8ed5-0fb88578a642-kube-api-access-ldxsw\") pod \"ceilometer-0\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " pod="openstack/ceilometer-0" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.701911 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/43119c92-a88a-4cf5-8ed5-0fb88578a642-run-httpd\") pod \"ceilometer-0\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " pod="openstack/ceilometer-0" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.704667 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/43119c92-a88a-4cf5-8ed5-0fb88578a642-log-httpd\") pod \"ceilometer-0\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " pod="openstack/ceilometer-0" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.704693 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/43119c92-a88a-4cf5-8ed5-0fb88578a642-run-httpd\") pod \"ceilometer-0\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " pod="openstack/ceilometer-0" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.714041 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/43119c92-a88a-4cf5-8ed5-0fb88578a642-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " pod="openstack/ceilometer-0" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.715279 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43119c92-a88a-4cf5-8ed5-0fb88578a642-config-data\") pod \"ceilometer-0\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " pod="openstack/ceilometer-0" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.720230 4840 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43119c92-a88a-4cf5-8ed5-0fb88578a642-scripts\") pod \"ceilometer-0\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " pod="openstack/ceilometer-0" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.720288 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43119c92-a88a-4cf5-8ed5-0fb88578a642-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " pod="openstack/ceilometer-0" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.736640 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldxsw\" (UniqueName: \"kubernetes.io/projected/43119c92-a88a-4cf5-8ed5-0fb88578a642-kube-api-access-ldxsw\") pod \"ceilometer-0\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " pod="openstack/ceilometer-0" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.781333 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:18:43 crc kubenswrapper[4840]: I1205 15:18:43.848398 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.090412 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a917f9e-3ca3-49c8-9e7f-f6b7b2835961" path="/var/lib/kubelet/pods/1a917f9e-3ca3-49c8-9e7f-f6b7b2835961/volumes" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.091948 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a351148-31b1-402a-a8f5-0f26f81fddef" path="/var/lib/kubelet/pods/7a351148-31b1-402a-a8f5-0f26f81fddef/volumes" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.240692 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"58a6240b-dbe5-4208-b070-3b84e319505d","Type":"ContainerStarted","Data":"fceee12717220e6b3da59cf7d405bf04f847132c10974dc88ecd2285cec216d7"} Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.241103 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.241222 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="58a6240b-dbe5-4208-b070-3b84e319505d" containerName="cinder-api-log" containerID="cri-o://8099338a7fddec2ff27849f9d27343b9597493a5797e08f6d86400dbfa213a47" gracePeriod=30 Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.241386 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="58a6240b-dbe5-4208-b070-3b84e319505d" containerName="cinder-api" containerID="cri-o://fceee12717220e6b3da59cf7d405bf04f847132c10974dc88ecd2285cec216d7" gracePeriod=30 Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.245420 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c","Type":"ContainerStarted","Data":"c68abd15e10ec3bc8f527be37fa0e40a3fea5414ddf9ae38362ec367eb96a067"} Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.269050 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.269024723 podStartE2EDuration="5.269024723s" podCreationTimestamp="2025-12-05 15:18:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:18:44.268269531 +0000 UTC m=+1202.609332145" watchObservedRunningTime="2025-12-05 15:18:44.269024723 +0000 UTC m=+1202.610087337" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.303022 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.540362219 podStartE2EDuration="5.303001755s" podCreationTimestamp="2025-12-05 15:18:39 +0000 UTC" firstStartedPulling="2025-12-05 15:18:40.210660149 +0000 UTC m=+1198.551722763" lastFinishedPulling="2025-12-05 15:18:41.973299685 +0000 UTC m=+1200.314362299" observedRunningTime="2025-12-05 15:18:44.298708843 +0000 UTC m=+1202.639771457" watchObservedRunningTime="2025-12-05 15:18:44.303001755 +0000 UTC m=+1202.644064359" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.356155 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:18:44 crc kubenswrapper[4840]: W1205 15:18:44.394984 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43119c92_a88a_4cf5_8ed5_0fb88578a642.slice/crio-8c2f3b532682adbb947bfbb77cc72f71aa5d41605f9db8eb3e9f72e563646c2b WatchSource:0}: Error finding container 8c2f3b532682adbb947bfbb77cc72f71aa5d41605f9db8eb3e9f72e563646c2b: Status 404 returned error can't find the container with id 8c2f3b532682adbb947bfbb77cc72f71aa5d41605f9db8eb3e9f72e563646c2b Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.469543 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-897596894-64sgb"] Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.471354 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.474672 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.474821 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.483479 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-897596894-64sgb"] Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.601036 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.619540 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9eb8c11f-7d8a-4330-804c-e9fa74cd10e7-logs\") pod \"barbican-api-897596894-64sgb\" (UID: \"9eb8c11f-7d8a-4330-804c-e9fa74cd10e7\") " pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.619608 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9eb8c11f-7d8a-4330-804c-e9fa74cd10e7-config-data\") pod \"barbican-api-897596894-64sgb\" (UID: \"9eb8c11f-7d8a-4330-804c-e9fa74cd10e7\") " pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.619813 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7csr\" (UniqueName: \"kubernetes.io/projected/9eb8c11f-7d8a-4330-804c-e9fa74cd10e7-kube-api-access-q7csr\") pod \"barbican-api-897596894-64sgb\" (UID: \"9eb8c11f-7d8a-4330-804c-e9fa74cd10e7\") " pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.619941 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eb8c11f-7d8a-4330-804c-e9fa74cd10e7-combined-ca-bundle\") pod \"barbican-api-897596894-64sgb\" (UID: \"9eb8c11f-7d8a-4330-804c-e9fa74cd10e7\") " pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.620061 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9eb8c11f-7d8a-4330-804c-e9fa74cd10e7-public-tls-certs\") pod \"barbican-api-897596894-64sgb\" (UID: \"9eb8c11f-7d8a-4330-804c-e9fa74cd10e7\") " pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.620116 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9eb8c11f-7d8a-4330-804c-e9fa74cd10e7-internal-tls-certs\") pod \"barbican-api-897596894-64sgb\" (UID: \"9eb8c11f-7d8a-4330-804c-e9fa74cd10e7\") " pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.620149 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9eb8c11f-7d8a-4330-804c-e9fa74cd10e7-config-data-custom\") pod \"barbican-api-897596894-64sgb\" (UID: 
\"9eb8c11f-7d8a-4330-804c-e9fa74cd10e7\") " pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.721662 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9eb8c11f-7d8a-4330-804c-e9fa74cd10e7-config-data\") pod \"barbican-api-897596894-64sgb\" (UID: \"9eb8c11f-7d8a-4330-804c-e9fa74cd10e7\") " pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.721748 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7csr\" (UniqueName: \"kubernetes.io/projected/9eb8c11f-7d8a-4330-804c-e9fa74cd10e7-kube-api-access-q7csr\") pod \"barbican-api-897596894-64sgb\" (UID: \"9eb8c11f-7d8a-4330-804c-e9fa74cd10e7\") " pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.721786 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eb8c11f-7d8a-4330-804c-e9fa74cd10e7-combined-ca-bundle\") pod \"barbican-api-897596894-64sgb\" (UID: \"9eb8c11f-7d8a-4330-804c-e9fa74cd10e7\") " pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.721828 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9eb8c11f-7d8a-4330-804c-e9fa74cd10e7-public-tls-certs\") pod \"barbican-api-897596894-64sgb\" (UID: \"9eb8c11f-7d8a-4330-804c-e9fa74cd10e7\") " pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.721851 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9eb8c11f-7d8a-4330-804c-e9fa74cd10e7-internal-tls-certs\") pod \"barbican-api-897596894-64sgb\" (UID: \"9eb8c11f-7d8a-4330-804c-e9fa74cd10e7\") " pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.721896 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9eb8c11f-7d8a-4330-804c-e9fa74cd10e7-config-data-custom\") pod \"barbican-api-897596894-64sgb\" (UID: \"9eb8c11f-7d8a-4330-804c-e9fa74cd10e7\") " pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.722008 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9eb8c11f-7d8a-4330-804c-e9fa74cd10e7-logs\") pod \"barbican-api-897596894-64sgb\" (UID: \"9eb8c11f-7d8a-4330-804c-e9fa74cd10e7\") " pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.722464 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9eb8c11f-7d8a-4330-804c-e9fa74cd10e7-logs\") pod \"barbican-api-897596894-64sgb\" (UID: \"9eb8c11f-7d8a-4330-804c-e9fa74cd10e7\") " pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.732584 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9eb8c11f-7d8a-4330-804c-e9fa74cd10e7-config-data-custom\") pod \"barbican-api-897596894-64sgb\" (UID: \"9eb8c11f-7d8a-4330-804c-e9fa74cd10e7\") " 
pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.732984 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9eb8c11f-7d8a-4330-804c-e9fa74cd10e7-internal-tls-certs\") pod \"barbican-api-897596894-64sgb\" (UID: \"9eb8c11f-7d8a-4330-804c-e9fa74cd10e7\") " pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.732996 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9eb8c11f-7d8a-4330-804c-e9fa74cd10e7-public-tls-certs\") pod \"barbican-api-897596894-64sgb\" (UID: \"9eb8c11f-7d8a-4330-804c-e9fa74cd10e7\") " pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.733109 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9eb8c11f-7d8a-4330-804c-e9fa74cd10e7-config-data\") pod \"barbican-api-897596894-64sgb\" (UID: \"9eb8c11f-7d8a-4330-804c-e9fa74cd10e7\") " pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.742800 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eb8c11f-7d8a-4330-804c-e9fa74cd10e7-combined-ca-bundle\") pod \"barbican-api-897596894-64sgb\" (UID: \"9eb8c11f-7d8a-4330-804c-e9fa74cd10e7\") " pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.748667 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7csr\" (UniqueName: \"kubernetes.io/projected/9eb8c11f-7d8a-4330-804c-e9fa74cd10e7-kube-api-access-q7csr\") pod \"barbican-api-897596894-64sgb\" (UID: \"9eb8c11f-7d8a-4330-804c-e9fa74cd10e7\") " pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:44 crc kubenswrapper[4840]: I1205 15:18:44.789179 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:45 crc kubenswrapper[4840]: I1205 15:18:45.275139 4840 generic.go:334] "Generic (PLEG): container finished" podID="58a6240b-dbe5-4208-b070-3b84e319505d" containerID="8099338a7fddec2ff27849f9d27343b9597493a5797e08f6d86400dbfa213a47" exitCode=143 Dec 05 15:18:45 crc kubenswrapper[4840]: I1205 15:18:45.275253 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"58a6240b-dbe5-4208-b070-3b84e319505d","Type":"ContainerDied","Data":"8099338a7fddec2ff27849f9d27343b9597493a5797e08f6d86400dbfa213a47"} Dec 05 15:18:45 crc kubenswrapper[4840]: I1205 15:18:45.280103 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"43119c92-a88a-4cf5-8ed5-0fb88578a642","Type":"ContainerStarted","Data":"8c2f3b532682adbb947bfbb77cc72f71aa5d41605f9db8eb3e9f72e563646c2b"} Dec 05 15:18:45 crc kubenswrapper[4840]: I1205 15:18:45.316006 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-897596894-64sgb"] Dec 05 15:18:46 crc kubenswrapper[4840]: I1205 15:18:46.110683 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-65db99bfb4-5mjgn" Dec 05 15:18:46 crc kubenswrapper[4840]: I1205 15:18:46.296825 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-897596894-64sgb" event={"ID":"9eb8c11f-7d8a-4330-804c-e9fa74cd10e7","Type":"ContainerStarted","Data":"4cff50b694118f648337847278c81be517660800043cbc2911fe0901cef82534"} Dec 05 15:18:46 crc kubenswrapper[4840]: I1205 15:18:46.297194 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-897596894-64sgb" event={"ID":"9eb8c11f-7d8a-4330-804c-e9fa74cd10e7","Type":"ContainerStarted","Data":"f5c9bd8c705685e97e3dfcd725def9642f8026f7602de5f0e4cf62818e7b67bd"} Dec 05 15:18:46 crc kubenswrapper[4840]: I1205 15:18:46.297208 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-897596894-64sgb" event={"ID":"9eb8c11f-7d8a-4330-804c-e9fa74cd10e7","Type":"ContainerStarted","Data":"b94904088ae48202e630107d4aa746e928be5b75b7e0292a6bd4cace862b9ce8"} Dec 05 15:18:46 crc kubenswrapper[4840]: I1205 15:18:46.297931 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:46 crc kubenswrapper[4840]: I1205 15:18:46.298207 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:46 crc kubenswrapper[4840]: I1205 15:18:46.310525 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"43119c92-a88a-4cf5-8ed5-0fb88578a642","Type":"ContainerStarted","Data":"1e21d80f6ee09b18e18b9093db304b2584ec6f6ed2c9717d48c0c8a38e67a6e0"} Dec 05 15:18:46 crc kubenswrapper[4840]: I1205 15:18:46.310635 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"43119c92-a88a-4cf5-8ed5-0fb88578a642","Type":"ContainerStarted","Data":"d20bfffd119f58c66ce71854f59c03009f8cb8ba8c34953b2e93abd73660d44b"} Dec 05 15:18:46 crc kubenswrapper[4840]: I1205 15:18:46.399441 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-897596894-64sgb" podStartSLOduration=2.39941577 podStartE2EDuration="2.39941577s" podCreationTimestamp="2025-12-05 15:18:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 
+0000 UTC" observedRunningTime="2025-12-05 15:18:46.38032378 +0000 UTC m=+1204.721386394" watchObservedRunningTime="2025-12-05 15:18:46.39941577 +0000 UTC m=+1204.740478384" Dec 05 15:18:47 crc kubenswrapper[4840]: I1205 15:18:47.321472 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"43119c92-a88a-4cf5-8ed5-0fb88578a642","Type":"ContainerStarted","Data":"c1a0242ba49506f2d139a853605a01164c500e4d585c9ee32d16fb49147c9b6e"} Dec 05 15:18:48 crc kubenswrapper[4840]: I1205 15:18:48.334619 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"43119c92-a88a-4cf5-8ed5-0fb88578a642","Type":"ContainerStarted","Data":"2ab6522cfe2481446f96635ec21e80e3bb7a98bd897c1e0659884a874435effc"} Dec 05 15:18:48 crc kubenswrapper[4840]: I1205 15:18:48.335168 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 15:18:48 crc kubenswrapper[4840]: I1205 15:18:48.371559 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.15615591 podStartE2EDuration="5.371533576s" podCreationTimestamp="2025-12-05 15:18:43 +0000 UTC" firstStartedPulling="2025-12-05 15:18:44.399103296 +0000 UTC m=+1202.740165910" lastFinishedPulling="2025-12-05 15:18:47.614480962 +0000 UTC m=+1205.955543576" observedRunningTime="2025-12-05 15:18:48.359493955 +0000 UTC m=+1206.700556569" watchObservedRunningTime="2025-12-05 15:18:48.371533576 +0000 UTC m=+1206.712596200" Dec 05 15:18:49 crc kubenswrapper[4840]: I1205 15:18:49.336249 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6d557dcbfc-24qsg" Dec 05 15:18:49 crc kubenswrapper[4840]: I1205 15:18:49.425613 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-65db99bfb4-5mjgn"] Dec 05 15:18:49 crc kubenswrapper[4840]: I1205 15:18:49.426101 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-65db99bfb4-5mjgn" podUID="15fbee23-d910-410f-ae76-91c967143dca" containerName="neutron-api" containerID="cri-o://f070c57e27ea20004c70c105a1f4692127a82b413a3acac42ee85f84b20df5d5" gracePeriod=30 Dec 05 15:18:49 crc kubenswrapper[4840]: I1205 15:18:49.427251 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-65db99bfb4-5mjgn" podUID="15fbee23-d910-410f-ae76-91c967143dca" containerName="neutron-httpd" containerID="cri-o://4904b238ed6aa1efa8cf090e2250222b04c10bb19ca0ea5574326ad17148c3a9" gracePeriod=30 Dec 05 15:18:49 crc kubenswrapper[4840]: I1205 15:18:49.561207 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-57c5d544f8-4d7j9" Dec 05 15:18:49 crc kubenswrapper[4840]: I1205 15:18:49.669909 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-57c5d544f8-4d7j9" Dec 05 15:18:49 crc kubenswrapper[4840]: I1205 15:18:49.889056 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 05 15:18:49 crc kubenswrapper[4840]: I1205 15:18:49.927033 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" Dec 05 15:18:49 crc kubenswrapper[4840]: I1205 15:18:49.930921 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 15:18:49 crc kubenswrapper[4840]: I1205 15:18:49.996614 4840 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-rl6pm"] Dec 05 15:18:49 crc kubenswrapper[4840]: I1205 15:18:49.996938 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" podUID="e7360138-33f0-4582-86d8-4eced23bb05b" containerName="dnsmasq-dns" containerID="cri-o://1822e2ad6028e2ca5c08e888e8eda32e67a7addb69d98f2ebd705cc067aaa107" gracePeriod=10 Dec 05 15:18:50 crc kubenswrapper[4840]: I1205 15:18:50.378053 4840 generic.go:334] "Generic (PLEG): container finished" podID="15fbee23-d910-410f-ae76-91c967143dca" containerID="4904b238ed6aa1efa8cf090e2250222b04c10bb19ca0ea5574326ad17148c3a9" exitCode=0 Dec 05 15:18:50 crc kubenswrapper[4840]: I1205 15:18:50.378387 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-65db99bfb4-5mjgn" event={"ID":"15fbee23-d910-410f-ae76-91c967143dca","Type":"ContainerDied","Data":"4904b238ed6aa1efa8cf090e2250222b04c10bb19ca0ea5574326ad17148c3a9"} Dec 05 15:18:50 crc kubenswrapper[4840]: I1205 15:18:50.393028 4840 generic.go:334] "Generic (PLEG): container finished" podID="e7360138-33f0-4582-86d8-4eced23bb05b" containerID="1822e2ad6028e2ca5c08e888e8eda32e67a7addb69d98f2ebd705cc067aaa107" exitCode=0 Dec 05 15:18:50 crc kubenswrapper[4840]: I1205 15:18:50.393658 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" event={"ID":"e7360138-33f0-4582-86d8-4eced23bb05b","Type":"ContainerDied","Data":"1822e2ad6028e2ca5c08e888e8eda32e67a7addb69d98f2ebd705cc067aaa107"} Dec 05 15:18:50 crc kubenswrapper[4840]: I1205 15:18:50.393987 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c" containerName="cinder-scheduler" containerID="cri-o://e2a9ad7d895504f96a731d02835357b24b2817379d25516fbf12356b233dce70" gracePeriod=30 Dec 05 15:18:50 crc kubenswrapper[4840]: I1205 15:18:50.394306 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c" containerName="probe" containerID="cri-o://c68abd15e10ec3bc8f527be37fa0e40a3fea5414ddf9ae38362ec367eb96a067" gracePeriod=30 Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.058234 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.180888 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-dns-svc\") pod \"e7360138-33f0-4582-86d8-4eced23bb05b\" (UID: \"e7360138-33f0-4582-86d8-4eced23bb05b\") " Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.181015 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-ovsdbserver-nb\") pod \"e7360138-33f0-4582-86d8-4eced23bb05b\" (UID: \"e7360138-33f0-4582-86d8-4eced23bb05b\") " Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.181053 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jlqff\" (UniqueName: \"kubernetes.io/projected/e7360138-33f0-4582-86d8-4eced23bb05b-kube-api-access-jlqff\") pod \"e7360138-33f0-4582-86d8-4eced23bb05b\" (UID: \"e7360138-33f0-4582-86d8-4eced23bb05b\") " Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.181182 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-ovsdbserver-sb\") pod \"e7360138-33f0-4582-86d8-4eced23bb05b\" (UID: \"e7360138-33f0-4582-86d8-4eced23bb05b\") " Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.181228 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-config\") pod \"e7360138-33f0-4582-86d8-4eced23bb05b\" (UID: \"e7360138-33f0-4582-86d8-4eced23bb05b\") " Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.181286 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-dns-swift-storage-0\") pod \"e7360138-33f0-4582-86d8-4eced23bb05b\" (UID: \"e7360138-33f0-4582-86d8-4eced23bb05b\") " Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.188152 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7360138-33f0-4582-86d8-4eced23bb05b-kube-api-access-jlqff" (OuterVolumeSpecName: "kube-api-access-jlqff") pod "e7360138-33f0-4582-86d8-4eced23bb05b" (UID: "e7360138-33f0-4582-86d8-4eced23bb05b"). InnerVolumeSpecName "kube-api-access-jlqff". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.280067 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e7360138-33f0-4582-86d8-4eced23bb05b" (UID: "e7360138-33f0-4582-86d8-4eced23bb05b"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.288035 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jlqff\" (UniqueName: \"kubernetes.io/projected/e7360138-33f0-4582-86d8-4eced23bb05b-kube-api-access-jlqff\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.288067 4840 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.313341 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e7360138-33f0-4582-86d8-4eced23bb05b" (UID: "e7360138-33f0-4582-86d8-4eced23bb05b"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.327615 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e7360138-33f0-4582-86d8-4eced23bb05b" (UID: "e7360138-33f0-4582-86d8-4eced23bb05b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.366358 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e7360138-33f0-4582-86d8-4eced23bb05b" (UID: "e7360138-33f0-4582-86d8-4eced23bb05b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.372198 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-config" (OuterVolumeSpecName: "config") pod "e7360138-33f0-4582-86d8-4eced23bb05b" (UID: "e7360138-33f0-4582-86d8-4eced23bb05b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.390103 4840 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.390135 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-config\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.390145 4840 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.390156 4840 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e7360138-33f0-4582-86d8-4eced23bb05b-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.426783 4840 generic.go:334] "Generic (PLEG): container finished" podID="f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c" containerID="e2a9ad7d895504f96a731d02835357b24b2817379d25516fbf12356b233dce70" exitCode=0 Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.426911 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c","Type":"ContainerDied","Data":"e2a9ad7d895504f96a731d02835357b24b2817379d25516fbf12356b233dce70"} Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.445036 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" event={"ID":"e7360138-33f0-4582-86d8-4eced23bb05b","Type":"ContainerDied","Data":"d6cfe33c066236e015cad0adf77c3226649ea187926b47451a7d2601eb6dfec8"} Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.445093 4840 scope.go:117] "RemoveContainer" containerID="1822e2ad6028e2ca5c08e888e8eda32e67a7addb69d98f2ebd705cc067aaa107" Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.445238 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.489946 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-rl6pm"] Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.509089 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-rl6pm"] Dec 05 15:18:51 crc kubenswrapper[4840]: I1205 15:18:51.509098 4840 scope.go:117] "RemoveContainer" containerID="b97bd6ff2f33bebebb36f2a3a0029ee2722d4522e2500c7f7f4de263dbb526a4" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.087026 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7360138-33f0-4582-86d8-4eced23bb05b" path="/var/lib/kubelet/pods/e7360138-33f0-4582-86d8-4eced23bb05b/volumes" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.170627 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.313569 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-scripts\") pod \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\" (UID: \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\") " Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.313640 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-config-data\") pod \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\" (UID: \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\") " Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.313689 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-combined-ca-bundle\") pod \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\" (UID: \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\") " Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.313746 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-config-data-custom\") pod \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\" (UID: \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\") " Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.313791 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5j58k\" (UniqueName: \"kubernetes.io/projected/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-kube-api-access-5j58k\") pod \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\" (UID: \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\") " Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.313885 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-etc-machine-id\") pod \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\" (UID: \"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c\") " Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.314943 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c" (UID: "f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.319054 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-scripts" (OuterVolumeSpecName: "scripts") pod "f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c" (UID: "f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.322884 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-kube-api-access-5j58k" (OuterVolumeSpecName: "kube-api-access-5j58k") pod "f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c" (UID: "f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c"). InnerVolumeSpecName "kube-api-access-5j58k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.333779 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c" (UID: "f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.403755 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c" (UID: "f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.417709 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.417745 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.417759 4840 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.417768 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5j58k\" (UniqueName: \"kubernetes.io/projected/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-kube-api-access-5j58k\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.417777 4840 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.462016 4840 generic.go:334] "Generic (PLEG): container finished" podID="f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c" containerID="c68abd15e10ec3bc8f527be37fa0e40a3fea5414ddf9ae38362ec367eb96a067" exitCode=0 Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.462060 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c","Type":"ContainerDied","Data":"c68abd15e10ec3bc8f527be37fa0e40a3fea5414ddf9ae38362ec367eb96a067"} Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.462083 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c","Type":"ContainerDied","Data":"92d4245cba7871671c5e58eed3b86235154fa8947917a8fe6f8545d600ef5fd9"} Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.462099 4840 scope.go:117] "RemoveContainer" containerID="c68abd15e10ec3bc8f527be37fa0e40a3fea5414ddf9ae38362ec367eb96a067" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.462214 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.492086 4840 scope.go:117] "RemoveContainer" containerID="e2a9ad7d895504f96a731d02835357b24b2817379d25516fbf12356b233dce70" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.497264 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-config-data" (OuterVolumeSpecName: "config-data") pod "f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c" (UID: "f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.512258 4840 scope.go:117] "RemoveContainer" containerID="c68abd15e10ec3bc8f527be37fa0e40a3fea5414ddf9ae38362ec367eb96a067" Dec 05 15:18:52 crc kubenswrapper[4840]: E1205 15:18:52.512784 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c68abd15e10ec3bc8f527be37fa0e40a3fea5414ddf9ae38362ec367eb96a067\": container with ID starting with c68abd15e10ec3bc8f527be37fa0e40a3fea5414ddf9ae38362ec367eb96a067 not found: ID does not exist" containerID="c68abd15e10ec3bc8f527be37fa0e40a3fea5414ddf9ae38362ec367eb96a067" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.512815 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c68abd15e10ec3bc8f527be37fa0e40a3fea5414ddf9ae38362ec367eb96a067"} err="failed to get container status \"c68abd15e10ec3bc8f527be37fa0e40a3fea5414ddf9ae38362ec367eb96a067\": rpc error: code = NotFound desc = could not find container \"c68abd15e10ec3bc8f527be37fa0e40a3fea5414ddf9ae38362ec367eb96a067\": container with ID starting with c68abd15e10ec3bc8f527be37fa0e40a3fea5414ddf9ae38362ec367eb96a067 not found: ID does not exist" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.512855 4840 scope.go:117] "RemoveContainer" containerID="e2a9ad7d895504f96a731d02835357b24b2817379d25516fbf12356b233dce70" Dec 05 15:18:52 crc kubenswrapper[4840]: E1205 15:18:52.514025 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2a9ad7d895504f96a731d02835357b24b2817379d25516fbf12356b233dce70\": container with ID starting with e2a9ad7d895504f96a731d02835357b24b2817379d25516fbf12356b233dce70 not found: ID does not exist" containerID="e2a9ad7d895504f96a731d02835357b24b2817379d25516fbf12356b233dce70" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.514052 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2a9ad7d895504f96a731d02835357b24b2817379d25516fbf12356b233dce70"} err="failed to get container status \"e2a9ad7d895504f96a731d02835357b24b2817379d25516fbf12356b233dce70\": rpc error: code = NotFound desc = could not find container \"e2a9ad7d895504f96a731d02835357b24b2817379d25516fbf12356b233dce70\": container with ID starting with e2a9ad7d895504f96a731d02835357b24b2817379d25516fbf12356b233dce70 not found: ID does not exist" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.519088 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.815765 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] 
Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.846648 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.860784 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 15:18:52 crc kubenswrapper[4840]: E1205 15:18:52.861228 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c" containerName="cinder-scheduler" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.861245 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c" containerName="cinder-scheduler" Dec 05 15:18:52 crc kubenswrapper[4840]: E1205 15:18:52.861263 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c" containerName="probe" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.861269 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c" containerName="probe" Dec 05 15:18:52 crc kubenswrapper[4840]: E1205 15:18:52.861289 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7360138-33f0-4582-86d8-4eced23bb05b" containerName="dnsmasq-dns" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.861295 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7360138-33f0-4582-86d8-4eced23bb05b" containerName="dnsmasq-dns" Dec 05 15:18:52 crc kubenswrapper[4840]: E1205 15:18:52.861320 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7360138-33f0-4582-86d8-4eced23bb05b" containerName="init" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.861326 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7360138-33f0-4582-86d8-4eced23bb05b" containerName="init" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.861495 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7360138-33f0-4582-86d8-4eced23bb05b" containerName="dnsmasq-dns" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.861509 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c" containerName="cinder-scheduler" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.861522 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c" containerName="probe" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.862495 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.866278 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.874335 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.941804 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bd99a832-4ebb-49a0-88ae-89f2b247da8a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"bd99a832-4ebb-49a0-88ae-89f2b247da8a\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.941950 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bd99a832-4ebb-49a0-88ae-89f2b247da8a-scripts\") pod \"cinder-scheduler-0\" (UID: \"bd99a832-4ebb-49a0-88ae-89f2b247da8a\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.941989 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd99a832-4ebb-49a0-88ae-89f2b247da8a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"bd99a832-4ebb-49a0-88ae-89f2b247da8a\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.942074 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bd99a832-4ebb-49a0-88ae-89f2b247da8a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"bd99a832-4ebb-49a0-88ae-89f2b247da8a\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.942159 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2z2qm\" (UniqueName: \"kubernetes.io/projected/bd99a832-4ebb-49a0-88ae-89f2b247da8a-kube-api-access-2z2qm\") pod \"cinder-scheduler-0\" (UID: \"bd99a832-4ebb-49a0-88ae-89f2b247da8a\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:52 crc kubenswrapper[4840]: I1205 15:18:52.942204 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd99a832-4ebb-49a0-88ae-89f2b247da8a-config-data\") pod \"cinder-scheduler-0\" (UID: \"bd99a832-4ebb-49a0-88ae-89f2b247da8a\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:53 crc kubenswrapper[4840]: I1205 15:18:53.043329 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd99a832-4ebb-49a0-88ae-89f2b247da8a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"bd99a832-4ebb-49a0-88ae-89f2b247da8a\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:53 crc kubenswrapper[4840]: I1205 15:18:53.043663 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bd99a832-4ebb-49a0-88ae-89f2b247da8a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"bd99a832-4ebb-49a0-88ae-89f2b247da8a\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:53 crc kubenswrapper[4840]: I1205 15:18:53.043825 4840 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-2z2qm\" (UniqueName: \"kubernetes.io/projected/bd99a832-4ebb-49a0-88ae-89f2b247da8a-kube-api-access-2z2qm\") pod \"cinder-scheduler-0\" (UID: \"bd99a832-4ebb-49a0-88ae-89f2b247da8a\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:53 crc kubenswrapper[4840]: I1205 15:18:53.043962 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd99a832-4ebb-49a0-88ae-89f2b247da8a-config-data\") pod \"cinder-scheduler-0\" (UID: \"bd99a832-4ebb-49a0-88ae-89f2b247da8a\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:53 crc kubenswrapper[4840]: I1205 15:18:53.044102 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bd99a832-4ebb-49a0-88ae-89f2b247da8a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"bd99a832-4ebb-49a0-88ae-89f2b247da8a\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:53 crc kubenswrapper[4840]: I1205 15:18:53.044328 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bd99a832-4ebb-49a0-88ae-89f2b247da8a-scripts\") pod \"cinder-scheduler-0\" (UID: \"bd99a832-4ebb-49a0-88ae-89f2b247da8a\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:53 crc kubenswrapper[4840]: I1205 15:18:53.044387 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bd99a832-4ebb-49a0-88ae-89f2b247da8a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"bd99a832-4ebb-49a0-88ae-89f2b247da8a\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:53 crc kubenswrapper[4840]: I1205 15:18:53.049351 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd99a832-4ebb-49a0-88ae-89f2b247da8a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"bd99a832-4ebb-49a0-88ae-89f2b247da8a\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:53 crc kubenswrapper[4840]: I1205 15:18:53.051763 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bd99a832-4ebb-49a0-88ae-89f2b247da8a-scripts\") pod \"cinder-scheduler-0\" (UID: \"bd99a832-4ebb-49a0-88ae-89f2b247da8a\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:53 crc kubenswrapper[4840]: I1205 15:18:53.052286 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bd99a832-4ebb-49a0-88ae-89f2b247da8a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"bd99a832-4ebb-49a0-88ae-89f2b247da8a\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:53 crc kubenswrapper[4840]: I1205 15:18:53.063623 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2z2qm\" (UniqueName: \"kubernetes.io/projected/bd99a832-4ebb-49a0-88ae-89f2b247da8a-kube-api-access-2z2qm\") pod \"cinder-scheduler-0\" (UID: \"bd99a832-4ebb-49a0-88ae-89f2b247da8a\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:53 crc kubenswrapper[4840]: I1205 15:18:53.063820 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd99a832-4ebb-49a0-88ae-89f2b247da8a-config-data\") pod \"cinder-scheduler-0\" (UID: \"bd99a832-4ebb-49a0-88ae-89f2b247da8a\") " pod="openstack/cinder-scheduler-0" Dec 05 15:18:53 crc 
kubenswrapper[4840]: I1205 15:18:53.169535 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 05 15:18:53 crc kubenswrapper[4840]: I1205 15:18:53.181826 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 15:18:53 crc kubenswrapper[4840]: W1205 15:18:53.749819 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd99a832_4ebb_49a0_88ae_89f2b247da8a.slice/crio-a557d66119614140d01bc41bda8d06063942f98ecfcc1640b8ec979253a18da3 WatchSource:0}: Error finding container a557d66119614140d01bc41bda8d06063942f98ecfcc1640b8ec979253a18da3: Status 404 returned error can't find the container with id a557d66119614140d01bc41bda8d06063942f98ecfcc1640b8ec979253a18da3 Dec 05 15:18:53 crc kubenswrapper[4840]: I1205 15:18:53.751718 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 15:18:54 crc kubenswrapper[4840]: I1205 15:18:54.083742 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c" path="/var/lib/kubelet/pods/f1632c9a-90cf-49f9-9cbe-96b1b1b53e7c/volumes" Dec 05 15:18:54 crc kubenswrapper[4840]: I1205 15:18:54.502923 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bd99a832-4ebb-49a0-88ae-89f2b247da8a","Type":"ContainerStarted","Data":"e97b5e2e713339941e822ee0976c3286594f53fc7d2ef59bdd9469f773bda395"} Dec 05 15:18:54 crc kubenswrapper[4840]: I1205 15:18:54.503300 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bd99a832-4ebb-49a0-88ae-89f2b247da8a","Type":"ContainerStarted","Data":"a557d66119614140d01bc41bda8d06063942f98ecfcc1640b8ec979253a18da3"} Dec 05 15:18:54 crc kubenswrapper[4840]: I1205 15:18:54.998645 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:18:55 crc kubenswrapper[4840]: I1205 15:18:55.067929 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:18:55 crc kubenswrapper[4840]: I1205 15:18:55.512979 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"bd99a832-4ebb-49a0-88ae-89f2b247da8a","Type":"ContainerStarted","Data":"abbbcf0b1dbc31e09e7ce892fa54b612a7c586232e83c625d27c9fed7dc8d6ec"} Dec 05 15:18:55 crc kubenswrapper[4840]: I1205 15:18:55.542240 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.542213507 podStartE2EDuration="3.542213507s" podCreationTimestamp="2025-12-05 15:18:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:18:55.5338504 +0000 UTC m=+1213.874913014" watchObservedRunningTime="2025-12-05 15:18:55.542213507 +0000 UTC m=+1213.883276121" Dec 05 15:18:55 crc kubenswrapper[4840]: I1205 15:18:55.944691 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5ccc5c4795-rl6pm" podUID="e7360138-33f0-4582-86d8-4eced23bb05b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.153:5353: i/o timeout" Dec 05 15:18:56 crc kubenswrapper[4840]: I1205 15:18:56.469307 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:56 crc kubenswrapper[4840]: I1205 15:18:56.568487 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-897596894-64sgb" Dec 05 15:18:56 crc kubenswrapper[4840]: I1205 15:18:56.670199 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-57c5d544f8-4d7j9"] Dec 05 15:18:56 crc kubenswrapper[4840]: I1205 15:18:56.670449 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-57c5d544f8-4d7j9" podUID="c6906a25-b26e-4510-aa75-2227ff1f3e1a" containerName="barbican-api-log" containerID="cri-o://268aff6bade259ee1c5aacf59eeb6704a0ea0e317e2f3e0232ffb57fc7880f9f" gracePeriod=30 Dec 05 15:18:56 crc kubenswrapper[4840]: I1205 15:18:56.670524 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-57c5d544f8-4d7j9" podUID="c6906a25-b26e-4510-aa75-2227ff1f3e1a" containerName="barbican-api" containerID="cri-o://8f533137e6f3ea4a1bc5cd613d88aa031669a40b5da29f855a7f3e2d91ba83b9" gracePeriod=30 Dec 05 15:18:57 crc kubenswrapper[4840]: I1205 15:18:57.185395 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7b99464548-lx7k9" Dec 05 15:18:57 crc kubenswrapper[4840]: I1205 15:18:57.250582 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:18:57 crc kubenswrapper[4840]: I1205 15:18:57.252626 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-67c65cc6bd-s49k5"] Dec 05 15:18:57 crc kubenswrapper[4840]: I1205 15:18:57.530291 4840 generic.go:334] "Generic (PLEG): container finished" podID="c6906a25-b26e-4510-aa75-2227ff1f3e1a" containerID="268aff6bade259ee1c5aacf59eeb6704a0ea0e317e2f3e0232ffb57fc7880f9f" exitCode=143 Dec 05 15:18:57 crc kubenswrapper[4840]: I1205 15:18:57.530369 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-57c5d544f8-4d7j9" event={"ID":"c6906a25-b26e-4510-aa75-2227ff1f3e1a","Type":"ContainerDied","Data":"268aff6bade259ee1c5aacf59eeb6704a0ea0e317e2f3e0232ffb57fc7880f9f"} Dec 05 15:18:57 crc kubenswrapper[4840]: I1205 15:18:57.531720 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-67c65cc6bd-s49k5" podUID="15d9aa0d-7948-40b8-a043-e6d36f2a5bb1" containerName="horizon-log" containerID="cri-o://0f4137ff5a4365825eaf588ce2e0ecffcd8c5c44aa14dc9661428fbcf297af65" gracePeriod=30 Dec 05 15:18:57 crc kubenswrapper[4840]: I1205 15:18:57.531852 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-67c65cc6bd-s49k5" podUID="15d9aa0d-7948-40b8-a043-e6d36f2a5bb1" containerName="horizon" containerID="cri-o://d4456ed432fab1713012466f282b0a590b939e801f0b909aaed8aca97366bc96" gracePeriod=30 Dec 05 15:18:58 crc kubenswrapper[4840]: I1205 15:18:58.182319 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 15:18:58 crc kubenswrapper[4840]: I1205 15:18:58.919466 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:58 crc kubenswrapper[4840]: I1205 15:18:58.920386 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5d878656b-vljqr" Dec 05 15:18:59 crc kubenswrapper[4840]: I1205 15:18:59.423080 4840 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/keystone-7b75bf6fbf-wj8dt" Dec 05 15:18:59 crc kubenswrapper[4840]: I1205 15:18:59.837287 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-57c5d544f8-4d7j9" podUID="c6906a25-b26e-4510-aa75-2227ff1f3e1a" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.162:9311/healthcheck\": read tcp 10.217.0.2:58012->10.217.0.162:9311: read: connection reset by peer" Dec 05 15:18:59 crc kubenswrapper[4840]: I1205 15:18:59.837287 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-57c5d544f8-4d7j9" podUID="c6906a25-b26e-4510-aa75-2227ff1f3e1a" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.162:9311/healthcheck\": read tcp 10.217.0.2:58028->10.217.0.162:9311: read: connection reset by peer" Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.337552 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-57c5d544f8-4d7j9" Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.421916 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c6906a25-b26e-4510-aa75-2227ff1f3e1a-config-data-custom\") pod \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\" (UID: \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\") " Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.421999 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6906a25-b26e-4510-aa75-2227ff1f3e1a-combined-ca-bundle\") pod \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\" (UID: \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\") " Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.422071 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tms8x\" (UniqueName: \"kubernetes.io/projected/c6906a25-b26e-4510-aa75-2227ff1f3e1a-kube-api-access-tms8x\") pod \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\" (UID: \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\") " Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.422126 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6906a25-b26e-4510-aa75-2227ff1f3e1a-config-data\") pod \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\" (UID: \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\") " Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.422154 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6906a25-b26e-4510-aa75-2227ff1f3e1a-logs\") pod \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\" (UID: \"c6906a25-b26e-4510-aa75-2227ff1f3e1a\") " Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.423239 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6906a25-b26e-4510-aa75-2227ff1f3e1a-logs" (OuterVolumeSpecName: "logs") pod "c6906a25-b26e-4510-aa75-2227ff1f3e1a" (UID: "c6906a25-b26e-4510-aa75-2227ff1f3e1a"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.428077 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6906a25-b26e-4510-aa75-2227ff1f3e1a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "c6906a25-b26e-4510-aa75-2227ff1f3e1a" (UID: "c6906a25-b26e-4510-aa75-2227ff1f3e1a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.428336 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6906a25-b26e-4510-aa75-2227ff1f3e1a-kube-api-access-tms8x" (OuterVolumeSpecName: "kube-api-access-tms8x") pod "c6906a25-b26e-4510-aa75-2227ff1f3e1a" (UID: "c6906a25-b26e-4510-aa75-2227ff1f3e1a"). InnerVolumeSpecName "kube-api-access-tms8x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.452444 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6906a25-b26e-4510-aa75-2227ff1f3e1a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c6906a25-b26e-4510-aa75-2227ff1f3e1a" (UID: "c6906a25-b26e-4510-aa75-2227ff1f3e1a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.481154 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6906a25-b26e-4510-aa75-2227ff1f3e1a-config-data" (OuterVolumeSpecName: "config-data") pod "c6906a25-b26e-4510-aa75-2227ff1f3e1a" (UID: "c6906a25-b26e-4510-aa75-2227ff1f3e1a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.524733 4840 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c6906a25-b26e-4510-aa75-2227ff1f3e1a-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.524777 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6906a25-b26e-4510-aa75-2227ff1f3e1a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.524788 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tms8x\" (UniqueName: \"kubernetes.io/projected/c6906a25-b26e-4510-aa75-2227ff1f3e1a-kube-api-access-tms8x\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.524799 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6906a25-b26e-4510-aa75-2227ff1f3e1a-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.524813 4840 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6906a25-b26e-4510-aa75-2227ff1f3e1a-logs\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.567807 4840 generic.go:334] "Generic (PLEG): container finished" podID="c6906a25-b26e-4510-aa75-2227ff1f3e1a" containerID="8f533137e6f3ea4a1bc5cd613d88aa031669a40b5da29f855a7f3e2d91ba83b9" exitCode=0 Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.567892 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-57c5d544f8-4d7j9" event={"ID":"c6906a25-b26e-4510-aa75-2227ff1f3e1a","Type":"ContainerDied","Data":"8f533137e6f3ea4a1bc5cd613d88aa031669a40b5da29f855a7f3e2d91ba83b9"} Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.567941 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-57c5d544f8-4d7j9" Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.567970 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-57c5d544f8-4d7j9" event={"ID":"c6906a25-b26e-4510-aa75-2227ff1f3e1a","Type":"ContainerDied","Data":"5cc55b0dd5245dadd032529eb173e9ebe2e0bebc3ec59cb7a42b9488ae017015"} Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.567997 4840 scope.go:117] "RemoveContainer" containerID="8f533137e6f3ea4a1bc5cd613d88aa031669a40b5da29f855a7f3e2d91ba83b9" Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.598813 4840 scope.go:117] "RemoveContainer" containerID="268aff6bade259ee1c5aacf59eeb6704a0ea0e317e2f3e0232ffb57fc7880f9f" Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.606242 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-57c5d544f8-4d7j9"] Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.615142 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-57c5d544f8-4d7j9"] Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.640677 4840 scope.go:117] "RemoveContainer" containerID="8f533137e6f3ea4a1bc5cd613d88aa031669a40b5da29f855a7f3e2d91ba83b9" Dec 05 15:19:00 crc kubenswrapper[4840]: E1205 15:19:00.641717 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f533137e6f3ea4a1bc5cd613d88aa031669a40b5da29f855a7f3e2d91ba83b9\": container with ID starting with 8f533137e6f3ea4a1bc5cd613d88aa031669a40b5da29f855a7f3e2d91ba83b9 not found: ID does not exist" containerID="8f533137e6f3ea4a1bc5cd613d88aa031669a40b5da29f855a7f3e2d91ba83b9" Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.641749 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f533137e6f3ea4a1bc5cd613d88aa031669a40b5da29f855a7f3e2d91ba83b9"} err="failed to get container status \"8f533137e6f3ea4a1bc5cd613d88aa031669a40b5da29f855a7f3e2d91ba83b9\": rpc error: code = NotFound desc = could not find container \"8f533137e6f3ea4a1bc5cd613d88aa031669a40b5da29f855a7f3e2d91ba83b9\": container with ID starting with 8f533137e6f3ea4a1bc5cd613d88aa031669a40b5da29f855a7f3e2d91ba83b9 not found: ID does not exist" Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.641771 4840 scope.go:117] "RemoveContainer" containerID="268aff6bade259ee1c5aacf59eeb6704a0ea0e317e2f3e0232ffb57fc7880f9f" Dec 05 15:19:00 crc kubenswrapper[4840]: E1205 15:19:00.642271 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"268aff6bade259ee1c5aacf59eeb6704a0ea0e317e2f3e0232ffb57fc7880f9f\": container with ID starting with 268aff6bade259ee1c5aacf59eeb6704a0ea0e317e2f3e0232ffb57fc7880f9f not found: ID does not exist" containerID="268aff6bade259ee1c5aacf59eeb6704a0ea0e317e2f3e0232ffb57fc7880f9f" Dec 05 15:19:00 crc kubenswrapper[4840]: I1205 15:19:00.642338 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"268aff6bade259ee1c5aacf59eeb6704a0ea0e317e2f3e0232ffb57fc7880f9f"} err="failed to get container status \"268aff6bade259ee1c5aacf59eeb6704a0ea0e317e2f3e0232ffb57fc7880f9f\": rpc error: code = NotFound desc = could not find container \"268aff6bade259ee1c5aacf59eeb6704a0ea0e317e2f3e0232ffb57fc7880f9f\": container with ID starting with 268aff6bade259ee1c5aacf59eeb6704a0ea0e317e2f3e0232ffb57fc7880f9f not found: ID does not exist" Dec 05 
15:19:01 crc kubenswrapper[4840]: I1205 15:19:01.585057 4840 generic.go:334] "Generic (PLEG): container finished" podID="15d9aa0d-7948-40b8-a043-e6d36f2a5bb1" containerID="d4456ed432fab1713012466f282b0a590b939e801f0b909aaed8aca97366bc96" exitCode=0 Dec 05 15:19:01 crc kubenswrapper[4840]: I1205 15:19:01.585676 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-67c65cc6bd-s49k5" event={"ID":"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1","Type":"ContainerDied","Data":"d4456ed432fab1713012466f282b0a590b939e801f0b909aaed8aca97366bc96"} Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.079608 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6906a25-b26e-4510-aa75-2227ff1f3e1a" path="/var/lib/kubelet/pods/c6906a25-b26e-4510-aa75-2227ff1f3e1a/volumes" Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.435315 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-67c65cc6bd-s49k5" podUID="15d9aa0d-7948-40b8-a043-e6d36f2a5bb1" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.149:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.149:8443: connect: connection refused" Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.533594 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 05 15:19:02 crc kubenswrapper[4840]: E1205 15:19:02.534053 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6906a25-b26e-4510-aa75-2227ff1f3e1a" containerName="barbican-api" Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.534076 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6906a25-b26e-4510-aa75-2227ff1f3e1a" containerName="barbican-api" Dec 05 15:19:02 crc kubenswrapper[4840]: E1205 15:19:02.534111 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6906a25-b26e-4510-aa75-2227ff1f3e1a" containerName="barbican-api-log" Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.534120 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6906a25-b26e-4510-aa75-2227ff1f3e1a" containerName="barbican-api-log" Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.534387 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6906a25-b26e-4510-aa75-2227ff1f3e1a" containerName="barbican-api-log" Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.534408 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6906a25-b26e-4510-aa75-2227ff1f3e1a" containerName="barbican-api" Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.535204 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.539885 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.540252 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-9g5vd" Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.545667 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.546072 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.671312 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/304fce22-b828-4844-9db0-13120847afc1-combined-ca-bundle\") pod \"openstackclient\" (UID: \"304fce22-b828-4844-9db0-13120847afc1\") " pod="openstack/openstackclient" Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.671768 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/304fce22-b828-4844-9db0-13120847afc1-openstack-config-secret\") pod \"openstackclient\" (UID: \"304fce22-b828-4844-9db0-13120847afc1\") " pod="openstack/openstackclient" Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.671899 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zrbj\" (UniqueName: \"kubernetes.io/projected/304fce22-b828-4844-9db0-13120847afc1-kube-api-access-5zrbj\") pod \"openstackclient\" (UID: \"304fce22-b828-4844-9db0-13120847afc1\") " pod="openstack/openstackclient" Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.671935 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/304fce22-b828-4844-9db0-13120847afc1-openstack-config\") pod \"openstackclient\" (UID: \"304fce22-b828-4844-9db0-13120847afc1\") " pod="openstack/openstackclient" Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.774176 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/304fce22-b828-4844-9db0-13120847afc1-combined-ca-bundle\") pod \"openstackclient\" (UID: \"304fce22-b828-4844-9db0-13120847afc1\") " pod="openstack/openstackclient" Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.774269 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/304fce22-b828-4844-9db0-13120847afc1-openstack-config-secret\") pod \"openstackclient\" (UID: \"304fce22-b828-4844-9db0-13120847afc1\") " pod="openstack/openstackclient" Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.774327 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zrbj\" (UniqueName: \"kubernetes.io/projected/304fce22-b828-4844-9db0-13120847afc1-kube-api-access-5zrbj\") pod \"openstackclient\" (UID: \"304fce22-b828-4844-9db0-13120847afc1\") " pod="openstack/openstackclient" Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.774350 4840 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/304fce22-b828-4844-9db0-13120847afc1-openstack-config\") pod \"openstackclient\" (UID: \"304fce22-b828-4844-9db0-13120847afc1\") " pod="openstack/openstackclient" Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.775440 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/304fce22-b828-4844-9db0-13120847afc1-openstack-config\") pod \"openstackclient\" (UID: \"304fce22-b828-4844-9db0-13120847afc1\") " pod="openstack/openstackclient" Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.782482 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/304fce22-b828-4844-9db0-13120847afc1-openstack-config-secret\") pod \"openstackclient\" (UID: \"304fce22-b828-4844-9db0-13120847afc1\") " pod="openstack/openstackclient" Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.782776 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/304fce22-b828-4844-9db0-13120847afc1-combined-ca-bundle\") pod \"openstackclient\" (UID: \"304fce22-b828-4844-9db0-13120847afc1\") " pod="openstack/openstackclient" Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.799341 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zrbj\" (UniqueName: \"kubernetes.io/projected/304fce22-b828-4844-9db0-13120847afc1-kube-api-access-5zrbj\") pod \"openstackclient\" (UID: \"304fce22-b828-4844-9db0-13120847afc1\") " pod="openstack/openstackclient" Dec 05 15:19:02 crc kubenswrapper[4840]: I1205 15:19:02.862845 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 15:19:03 crc kubenswrapper[4840]: I1205 15:19:03.385348 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 15:19:03 crc kubenswrapper[4840]: I1205 15:19:03.450326 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 05 15:19:03 crc kubenswrapper[4840]: I1205 15:19:03.605703 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"304fce22-b828-4844-9db0-13120847afc1","Type":"ContainerStarted","Data":"7cb73fda60ba06d488df03c082cf74949aa568e2a14ccbce39ce35a2c3bf17d8"} Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.376829 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-65db99bfb4-5mjgn" Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.509046 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/15fbee23-d910-410f-ae76-91c967143dca-config\") pod \"15fbee23-d910-410f-ae76-91c967143dca\" (UID: \"15fbee23-d910-410f-ae76-91c967143dca\") " Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.509101 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15fbee23-d910-410f-ae76-91c967143dca-combined-ca-bundle\") pod \"15fbee23-d910-410f-ae76-91c967143dca\" (UID: \"15fbee23-d910-410f-ae76-91c967143dca\") " Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.509171 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lhgts\" (UniqueName: \"kubernetes.io/projected/15fbee23-d910-410f-ae76-91c967143dca-kube-api-access-lhgts\") pod \"15fbee23-d910-410f-ae76-91c967143dca\" (UID: \"15fbee23-d910-410f-ae76-91c967143dca\") " Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.509294 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/15fbee23-d910-410f-ae76-91c967143dca-ovndb-tls-certs\") pod \"15fbee23-d910-410f-ae76-91c967143dca\" (UID: \"15fbee23-d910-410f-ae76-91c967143dca\") " Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.509376 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/15fbee23-d910-410f-ae76-91c967143dca-httpd-config\") pod \"15fbee23-d910-410f-ae76-91c967143dca\" (UID: \"15fbee23-d910-410f-ae76-91c967143dca\") " Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.528214 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15fbee23-d910-410f-ae76-91c967143dca-kube-api-access-lhgts" (OuterVolumeSpecName: "kube-api-access-lhgts") pod "15fbee23-d910-410f-ae76-91c967143dca" (UID: "15fbee23-d910-410f-ae76-91c967143dca"). InnerVolumeSpecName "kube-api-access-lhgts". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.529702 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15fbee23-d910-410f-ae76-91c967143dca-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "15fbee23-d910-410f-ae76-91c967143dca" (UID: "15fbee23-d910-410f-ae76-91c967143dca"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.598335 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15fbee23-d910-410f-ae76-91c967143dca-config" (OuterVolumeSpecName: "config") pod "15fbee23-d910-410f-ae76-91c967143dca" (UID: "15fbee23-d910-410f-ae76-91c967143dca"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.612292 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/15fbee23-d910-410f-ae76-91c967143dca-config\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.612333 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lhgts\" (UniqueName: \"kubernetes.io/projected/15fbee23-d910-410f-ae76-91c967143dca-kube-api-access-lhgts\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.612348 4840 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/15fbee23-d910-410f-ae76-91c967143dca-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.618888 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15fbee23-d910-410f-ae76-91c967143dca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "15fbee23-d910-410f-ae76-91c967143dca" (UID: "15fbee23-d910-410f-ae76-91c967143dca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.620165 4840 generic.go:334] "Generic (PLEG): container finished" podID="15fbee23-d910-410f-ae76-91c967143dca" containerID="f070c57e27ea20004c70c105a1f4692127a82b413a3acac42ee85f84b20df5d5" exitCode=0 Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.620217 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-65db99bfb4-5mjgn" event={"ID":"15fbee23-d910-410f-ae76-91c967143dca","Type":"ContainerDied","Data":"f070c57e27ea20004c70c105a1f4692127a82b413a3acac42ee85f84b20df5d5"} Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.620256 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-65db99bfb4-5mjgn" event={"ID":"15fbee23-d910-410f-ae76-91c967143dca","Type":"ContainerDied","Data":"4f8a8f04509fce79d9c7415f91da743c1660732807eb3ad63e826b04c4263ef2"} Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.620275 4840 scope.go:117] "RemoveContainer" containerID="4904b238ed6aa1efa8cf090e2250222b04c10bb19ca0ea5574326ad17148c3a9" Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.620333 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-65db99bfb4-5mjgn" Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.644176 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15fbee23-d910-410f-ae76-91c967143dca-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "15fbee23-d910-410f-ae76-91c967143dca" (UID: "15fbee23-d910-410f-ae76-91c967143dca"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.653394 4840 scope.go:117] "RemoveContainer" containerID="f070c57e27ea20004c70c105a1f4692127a82b413a3acac42ee85f84b20df5d5" Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.682539 4840 scope.go:117] "RemoveContainer" containerID="4904b238ed6aa1efa8cf090e2250222b04c10bb19ca0ea5574326ad17148c3a9" Dec 05 15:19:04 crc kubenswrapper[4840]: E1205 15:19:04.683069 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4904b238ed6aa1efa8cf090e2250222b04c10bb19ca0ea5574326ad17148c3a9\": container with ID starting with 4904b238ed6aa1efa8cf090e2250222b04c10bb19ca0ea5574326ad17148c3a9 not found: ID does not exist" containerID="4904b238ed6aa1efa8cf090e2250222b04c10bb19ca0ea5574326ad17148c3a9" Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.683194 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4904b238ed6aa1efa8cf090e2250222b04c10bb19ca0ea5574326ad17148c3a9"} err="failed to get container status \"4904b238ed6aa1efa8cf090e2250222b04c10bb19ca0ea5574326ad17148c3a9\": rpc error: code = NotFound desc = could not find container \"4904b238ed6aa1efa8cf090e2250222b04c10bb19ca0ea5574326ad17148c3a9\": container with ID starting with 4904b238ed6aa1efa8cf090e2250222b04c10bb19ca0ea5574326ad17148c3a9 not found: ID does not exist" Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.683295 4840 scope.go:117] "RemoveContainer" containerID="f070c57e27ea20004c70c105a1f4692127a82b413a3acac42ee85f84b20df5d5" Dec 05 15:19:04 crc kubenswrapper[4840]: E1205 15:19:04.683780 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f070c57e27ea20004c70c105a1f4692127a82b413a3acac42ee85f84b20df5d5\": container with ID starting with f070c57e27ea20004c70c105a1f4692127a82b413a3acac42ee85f84b20df5d5 not found: ID does not exist" containerID="f070c57e27ea20004c70c105a1f4692127a82b413a3acac42ee85f84b20df5d5" Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.683813 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f070c57e27ea20004c70c105a1f4692127a82b413a3acac42ee85f84b20df5d5"} err="failed to get container status \"f070c57e27ea20004c70c105a1f4692127a82b413a3acac42ee85f84b20df5d5\": rpc error: code = NotFound desc = could not find container \"f070c57e27ea20004c70c105a1f4692127a82b413a3acac42ee85f84b20df5d5\": container with ID starting with f070c57e27ea20004c70c105a1f4692127a82b413a3acac42ee85f84b20df5d5 not found: ID does not exist" Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.713741 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15fbee23-d910-410f-ae76-91c967143dca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.713778 4840 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/15fbee23-d910-410f-ae76-91c967143dca-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.961412 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-65db99bfb4-5mjgn"] Dec 05 15:19:04 crc kubenswrapper[4840]: I1205 15:19:04.972010 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/neutron-65db99bfb4-5mjgn"] Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.078025 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15fbee23-d910-410f-ae76-91c967143dca" path="/var/lib/kubelet/pods/15fbee23-d910-410f-ae76-91c967143dca/volumes" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.620608 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-74d87df5bc-m4vp7"] Dec 05 15:19:06 crc kubenswrapper[4840]: E1205 15:19:06.621345 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15fbee23-d910-410f-ae76-91c967143dca" containerName="neutron-api" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.621372 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="15fbee23-d910-410f-ae76-91c967143dca" containerName="neutron-api" Dec 05 15:19:06 crc kubenswrapper[4840]: E1205 15:19:06.621393 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15fbee23-d910-410f-ae76-91c967143dca" containerName="neutron-httpd" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.621400 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="15fbee23-d910-410f-ae76-91c967143dca" containerName="neutron-httpd" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.621653 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="15fbee23-d910-410f-ae76-91c967143dca" containerName="neutron-api" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.621674 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="15fbee23-d910-410f-ae76-91c967143dca" containerName="neutron-httpd" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.627133 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.632638 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.632971 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.633055 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.641556 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-74d87df5bc-m4vp7"] Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.758174 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8217ba67-c10d-43b2-8e12-41c6c25aa2da-public-tls-certs\") pod \"swift-proxy-74d87df5bc-m4vp7\" (UID: \"8217ba67-c10d-43b2-8e12-41c6c25aa2da\") " pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.768621 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8217ba67-c10d-43b2-8e12-41c6c25aa2da-run-httpd\") pod \"swift-proxy-74d87df5bc-m4vp7\" (UID: \"8217ba67-c10d-43b2-8e12-41c6c25aa2da\") " pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.769388 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/projected/8217ba67-c10d-43b2-8e12-41c6c25aa2da-etc-swift\") pod \"swift-proxy-74d87df5bc-m4vp7\" (UID: \"8217ba67-c10d-43b2-8e12-41c6c25aa2da\") " pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.769591 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8217ba67-c10d-43b2-8e12-41c6c25aa2da-config-data\") pod \"swift-proxy-74d87df5bc-m4vp7\" (UID: \"8217ba67-c10d-43b2-8e12-41c6c25aa2da\") " pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.770124 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8217ba67-c10d-43b2-8e12-41c6c25aa2da-log-httpd\") pod \"swift-proxy-74d87df5bc-m4vp7\" (UID: \"8217ba67-c10d-43b2-8e12-41c6c25aa2da\") " pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.770293 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7dpl\" (UniqueName: \"kubernetes.io/projected/8217ba67-c10d-43b2-8e12-41c6c25aa2da-kube-api-access-w7dpl\") pod \"swift-proxy-74d87df5bc-m4vp7\" (UID: \"8217ba67-c10d-43b2-8e12-41c6c25aa2da\") " pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.770480 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8217ba67-c10d-43b2-8e12-41c6c25aa2da-combined-ca-bundle\") pod \"swift-proxy-74d87df5bc-m4vp7\" (UID: \"8217ba67-c10d-43b2-8e12-41c6c25aa2da\") " pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.770797 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8217ba67-c10d-43b2-8e12-41c6c25aa2da-internal-tls-certs\") pod \"swift-proxy-74d87df5bc-m4vp7\" (UID: \"8217ba67-c10d-43b2-8e12-41c6c25aa2da\") " pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.778427 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.779053 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="43119c92-a88a-4cf5-8ed5-0fb88578a642" containerName="ceilometer-central-agent" containerID="cri-o://d20bfffd119f58c66ce71854f59c03009f8cb8ba8c34953b2e93abd73660d44b" gracePeriod=30 Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.779208 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="43119c92-a88a-4cf5-8ed5-0fb88578a642" containerName="proxy-httpd" containerID="cri-o://2ab6522cfe2481446f96635ec21e80e3bb7a98bd897c1e0659884a874435effc" gracePeriod=30 Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.779290 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="43119c92-a88a-4cf5-8ed5-0fb88578a642" containerName="ceilometer-notification-agent" containerID="cri-o://1e21d80f6ee09b18e18b9093db304b2584ec6f6ed2c9717d48c0c8a38e67a6e0" gracePeriod=30 Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.779458 4840 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="43119c92-a88a-4cf5-8ed5-0fb88578a642" containerName="sg-core" containerID="cri-o://c1a0242ba49506f2d139a853605a01164c500e4d585c9ee32d16fb49147c9b6e" gracePeriod=30 Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.812615 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="43119c92-a88a-4cf5-8ed5-0fb88578a642" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.166:3000/\": EOF" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.872422 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8217ba67-c10d-43b2-8e12-41c6c25aa2da-combined-ca-bundle\") pod \"swift-proxy-74d87df5bc-m4vp7\" (UID: \"8217ba67-c10d-43b2-8e12-41c6c25aa2da\") " pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.872754 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8217ba67-c10d-43b2-8e12-41c6c25aa2da-internal-tls-certs\") pod \"swift-proxy-74d87df5bc-m4vp7\" (UID: \"8217ba67-c10d-43b2-8e12-41c6c25aa2da\") " pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.872821 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8217ba67-c10d-43b2-8e12-41c6c25aa2da-public-tls-certs\") pod \"swift-proxy-74d87df5bc-m4vp7\" (UID: \"8217ba67-c10d-43b2-8e12-41c6c25aa2da\") " pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.872843 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8217ba67-c10d-43b2-8e12-41c6c25aa2da-run-httpd\") pod \"swift-proxy-74d87df5bc-m4vp7\" (UID: \"8217ba67-c10d-43b2-8e12-41c6c25aa2da\") " pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.872904 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8217ba67-c10d-43b2-8e12-41c6c25aa2da-etc-swift\") pod \"swift-proxy-74d87df5bc-m4vp7\" (UID: \"8217ba67-c10d-43b2-8e12-41c6c25aa2da\") " pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.872946 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8217ba67-c10d-43b2-8e12-41c6c25aa2da-config-data\") pod \"swift-proxy-74d87df5bc-m4vp7\" (UID: \"8217ba67-c10d-43b2-8e12-41c6c25aa2da\") " pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.872968 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8217ba67-c10d-43b2-8e12-41c6c25aa2da-log-httpd\") pod \"swift-proxy-74d87df5bc-m4vp7\" (UID: \"8217ba67-c10d-43b2-8e12-41c6c25aa2da\") " pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.872993 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7dpl\" (UniqueName: \"kubernetes.io/projected/8217ba67-c10d-43b2-8e12-41c6c25aa2da-kube-api-access-w7dpl\") pod 
\"swift-proxy-74d87df5bc-m4vp7\" (UID: \"8217ba67-c10d-43b2-8e12-41c6c25aa2da\") " pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.874927 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8217ba67-c10d-43b2-8e12-41c6c25aa2da-log-httpd\") pod \"swift-proxy-74d87df5bc-m4vp7\" (UID: \"8217ba67-c10d-43b2-8e12-41c6c25aa2da\") " pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.875454 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8217ba67-c10d-43b2-8e12-41c6c25aa2da-run-httpd\") pod \"swift-proxy-74d87df5bc-m4vp7\" (UID: \"8217ba67-c10d-43b2-8e12-41c6c25aa2da\") " pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.878428 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8217ba67-c10d-43b2-8e12-41c6c25aa2da-combined-ca-bundle\") pod \"swift-proxy-74d87df5bc-m4vp7\" (UID: \"8217ba67-c10d-43b2-8e12-41c6c25aa2da\") " pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.878500 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8217ba67-c10d-43b2-8e12-41c6c25aa2da-config-data\") pod \"swift-proxy-74d87df5bc-m4vp7\" (UID: \"8217ba67-c10d-43b2-8e12-41c6c25aa2da\") " pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.879833 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8217ba67-c10d-43b2-8e12-41c6c25aa2da-etc-swift\") pod \"swift-proxy-74d87df5bc-m4vp7\" (UID: \"8217ba67-c10d-43b2-8e12-41c6c25aa2da\") " pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.880781 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8217ba67-c10d-43b2-8e12-41c6c25aa2da-internal-tls-certs\") pod \"swift-proxy-74d87df5bc-m4vp7\" (UID: \"8217ba67-c10d-43b2-8e12-41c6c25aa2da\") " pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.881456 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8217ba67-c10d-43b2-8e12-41c6c25aa2da-public-tls-certs\") pod \"swift-proxy-74d87df5bc-m4vp7\" (UID: \"8217ba67-c10d-43b2-8e12-41c6c25aa2da\") " pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.890475 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7dpl\" (UniqueName: \"kubernetes.io/projected/8217ba67-c10d-43b2-8e12-41c6c25aa2da-kube-api-access-w7dpl\") pod \"swift-proxy-74d87df5bc-m4vp7\" (UID: \"8217ba67-c10d-43b2-8e12-41c6c25aa2da\") " pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:06 crc kubenswrapper[4840]: I1205 15:19:06.962767 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:07 crc kubenswrapper[4840]: I1205 15:19:07.567382 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-74d87df5bc-m4vp7"] Dec 05 15:19:07 crc kubenswrapper[4840]: W1205 15:19:07.570568 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8217ba67_c10d_43b2_8e12_41c6c25aa2da.slice/crio-3a35424a20dccf6bc99ed085a9222558f699ad9e4f522dbb069425bfc6b60809 WatchSource:0}: Error finding container 3a35424a20dccf6bc99ed085a9222558f699ad9e4f522dbb069425bfc6b60809: Status 404 returned error can't find the container with id 3a35424a20dccf6bc99ed085a9222558f699ad9e4f522dbb069425bfc6b60809 Dec 05 15:19:07 crc kubenswrapper[4840]: I1205 15:19:07.674105 4840 generic.go:334] "Generic (PLEG): container finished" podID="43119c92-a88a-4cf5-8ed5-0fb88578a642" containerID="2ab6522cfe2481446f96635ec21e80e3bb7a98bd897c1e0659884a874435effc" exitCode=0 Dec 05 15:19:07 crc kubenswrapper[4840]: I1205 15:19:07.674143 4840 generic.go:334] "Generic (PLEG): container finished" podID="43119c92-a88a-4cf5-8ed5-0fb88578a642" containerID="c1a0242ba49506f2d139a853605a01164c500e4d585c9ee32d16fb49147c9b6e" exitCode=2 Dec 05 15:19:07 crc kubenswrapper[4840]: I1205 15:19:07.674154 4840 generic.go:334] "Generic (PLEG): container finished" podID="43119c92-a88a-4cf5-8ed5-0fb88578a642" containerID="d20bfffd119f58c66ce71854f59c03009f8cb8ba8c34953b2e93abd73660d44b" exitCode=0 Dec 05 15:19:07 crc kubenswrapper[4840]: I1205 15:19:07.674198 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"43119c92-a88a-4cf5-8ed5-0fb88578a642","Type":"ContainerDied","Data":"2ab6522cfe2481446f96635ec21e80e3bb7a98bd897c1e0659884a874435effc"} Dec 05 15:19:07 crc kubenswrapper[4840]: I1205 15:19:07.674230 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"43119c92-a88a-4cf5-8ed5-0fb88578a642","Type":"ContainerDied","Data":"c1a0242ba49506f2d139a853605a01164c500e4d585c9ee32d16fb49147c9b6e"} Dec 05 15:19:07 crc kubenswrapper[4840]: I1205 15:19:07.674243 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"43119c92-a88a-4cf5-8ed5-0fb88578a642","Type":"ContainerDied","Data":"d20bfffd119f58c66ce71854f59c03009f8cb8ba8c34953b2e93abd73660d44b"} Dec 05 15:19:07 crc kubenswrapper[4840]: I1205 15:19:07.675507 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-74d87df5bc-m4vp7" event={"ID":"8217ba67-c10d-43b2-8e12-41c6c25aa2da","Type":"ContainerStarted","Data":"3a35424a20dccf6bc99ed085a9222558f699ad9e4f522dbb069425bfc6b60809"} Dec 05 15:19:08 crc kubenswrapper[4840]: I1205 15:19:08.689286 4840 generic.go:334] "Generic (PLEG): container finished" podID="43119c92-a88a-4cf5-8ed5-0fb88578a642" containerID="1e21d80f6ee09b18e18b9093db304b2584ec6f6ed2c9717d48c0c8a38e67a6e0" exitCode=0 Dec 05 15:19:08 crc kubenswrapper[4840]: I1205 15:19:08.689495 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"43119c92-a88a-4cf5-8ed5-0fb88578a642","Type":"ContainerDied","Data":"1e21d80f6ee09b18e18b9093db304b2584ec6f6ed2c9717d48c0c8a38e67a6e0"} Dec 05 15:19:12 crc kubenswrapper[4840]: I1205 15:19:12.235149 4840 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod1a917f9e-3ca3-49c8-9e7f-f6b7b2835961"] err="unable to destroy cgroup paths for 
cgroup [kubepods besteffort pod1a917f9e-3ca3-49c8-9e7f-f6b7b2835961] : Timed out while waiting for systemd to remove kubepods-besteffort-pod1a917f9e_3ca3_49c8_9e7f_f6b7b2835961.slice" Dec 05 15:19:12 crc kubenswrapper[4840]: I1205 15:19:12.434852 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-67c65cc6bd-s49k5" podUID="15d9aa0d-7948-40b8-a043-e6d36f2a5bb1" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.149:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.149:8443: connect: connection refused" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.034108 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.116451 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/43119c92-a88a-4cf5-8ed5-0fb88578a642-log-httpd\") pod \"43119c92-a88a-4cf5-8ed5-0fb88578a642\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.116599 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/43119c92-a88a-4cf5-8ed5-0fb88578a642-sg-core-conf-yaml\") pod \"43119c92-a88a-4cf5-8ed5-0fb88578a642\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.116685 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/43119c92-a88a-4cf5-8ed5-0fb88578a642-run-httpd\") pod \"43119c92-a88a-4cf5-8ed5-0fb88578a642\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.116750 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43119c92-a88a-4cf5-8ed5-0fb88578a642-combined-ca-bundle\") pod \"43119c92-a88a-4cf5-8ed5-0fb88578a642\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.116770 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43119c92-a88a-4cf5-8ed5-0fb88578a642-config-data\") pod \"43119c92-a88a-4cf5-8ed5-0fb88578a642\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.116794 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldxsw\" (UniqueName: \"kubernetes.io/projected/43119c92-a88a-4cf5-8ed5-0fb88578a642-kube-api-access-ldxsw\") pod \"43119c92-a88a-4cf5-8ed5-0fb88578a642\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.116815 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43119c92-a88a-4cf5-8ed5-0fb88578a642-scripts\") pod \"43119c92-a88a-4cf5-8ed5-0fb88578a642\" (UID: \"43119c92-a88a-4cf5-8ed5-0fb88578a642\") " Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.117449 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43119c92-a88a-4cf5-8ed5-0fb88578a642-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "43119c92-a88a-4cf5-8ed5-0fb88578a642" (UID: "43119c92-a88a-4cf5-8ed5-0fb88578a642"). 
InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.117494 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43119c92-a88a-4cf5-8ed5-0fb88578a642-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "43119c92-a88a-4cf5-8ed5-0fb88578a642" (UID: "43119c92-a88a-4cf5-8ed5-0fb88578a642"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.119469 4840 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/43119c92-a88a-4cf5-8ed5-0fb88578a642-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.119492 4840 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/43119c92-a88a-4cf5-8ed5-0fb88578a642-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.121201 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43119c92-a88a-4cf5-8ed5-0fb88578a642-kube-api-access-ldxsw" (OuterVolumeSpecName: "kube-api-access-ldxsw") pod "43119c92-a88a-4cf5-8ed5-0fb88578a642" (UID: "43119c92-a88a-4cf5-8ed5-0fb88578a642"). InnerVolumeSpecName "kube-api-access-ldxsw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.122519 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43119c92-a88a-4cf5-8ed5-0fb88578a642-scripts" (OuterVolumeSpecName: "scripts") pod "43119c92-a88a-4cf5-8ed5-0fb88578a642" (UID: "43119c92-a88a-4cf5-8ed5-0fb88578a642"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.156702 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43119c92-a88a-4cf5-8ed5-0fb88578a642-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "43119c92-a88a-4cf5-8ed5-0fb88578a642" (UID: "43119c92-a88a-4cf5-8ed5-0fb88578a642"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.210811 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43119c92-a88a-4cf5-8ed5-0fb88578a642-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "43119c92-a88a-4cf5-8ed5-0fb88578a642" (UID: "43119c92-a88a-4cf5-8ed5-0fb88578a642"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.220687 4840 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/43119c92-a88a-4cf5-8ed5-0fb88578a642-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.220720 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43119c92-a88a-4cf5-8ed5-0fb88578a642-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.220730 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldxsw\" (UniqueName: \"kubernetes.io/projected/43119c92-a88a-4cf5-8ed5-0fb88578a642-kube-api-access-ldxsw\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.220742 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43119c92-a88a-4cf5-8ed5-0fb88578a642-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.235706 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43119c92-a88a-4cf5-8ed5-0fb88578a642-config-data" (OuterVolumeSpecName: "config-data") pod "43119c92-a88a-4cf5-8ed5-0fb88578a642" (UID: "43119c92-a88a-4cf5-8ed5-0fb88578a642"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.321820 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43119c92-a88a-4cf5-8ed5-0fb88578a642-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.740251 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-74d87df5bc-m4vp7" event={"ID":"8217ba67-c10d-43b2-8e12-41c6c25aa2da","Type":"ContainerStarted","Data":"7f5c40c8bf94981ae4dbbefa655709baf8b910f0b7a706a6b877088433b71ef9"} Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.740584 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-74d87df5bc-m4vp7" event={"ID":"8217ba67-c10d-43b2-8e12-41c6c25aa2da","Type":"ContainerStarted","Data":"1dd0375b42abdcf6195d6a933ce2b3fc2df835476f72c92394a27a4f20df5c2b"} Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.740622 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.744398 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"43119c92-a88a-4cf5-8ed5-0fb88578a642","Type":"ContainerDied","Data":"8c2f3b532682adbb947bfbb77cc72f71aa5d41605f9db8eb3e9f72e563646c2b"} Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.744453 4840 scope.go:117] "RemoveContainer" containerID="2ab6522cfe2481446f96635ec21e80e3bb7a98bd897c1e0659884a874435effc" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.744607 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.748472 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"304fce22-b828-4844-9db0-13120847afc1","Type":"ContainerStarted","Data":"cfbf974b50920451826fa274546e85f1bd51e39706b6ca907abc265dd51f3b1e"} Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.802970 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.353973902 podStartE2EDuration="11.802948428s" podCreationTimestamp="2025-12-05 15:19:02 +0000 UTC" firstStartedPulling="2025-12-05 15:19:03.401388252 +0000 UTC m=+1221.742450856" lastFinishedPulling="2025-12-05 15:19:12.850362768 +0000 UTC m=+1231.191425382" observedRunningTime="2025-12-05 15:19:13.791812973 +0000 UTC m=+1232.132875587" watchObservedRunningTime="2025-12-05 15:19:13.802948428 +0000 UTC m=+1232.144011042" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.803135 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-74d87df5bc-m4vp7" podStartSLOduration=7.803129393 podStartE2EDuration="7.803129393s" podCreationTimestamp="2025-12-05 15:19:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:19:13.774829462 +0000 UTC m=+1232.115892086" watchObservedRunningTime="2025-12-05 15:19:13.803129393 +0000 UTC m=+1232.144192007" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.810404 4840 scope.go:117] "RemoveContainer" containerID="c1a0242ba49506f2d139a853605a01164c500e4d585c9ee32d16fb49147c9b6e" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.832951 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.846187 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.873938 4840 scope.go:117] "RemoveContainer" containerID="1e21d80f6ee09b18e18b9093db304b2584ec6f6ed2c9717d48c0c8a38e67a6e0" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.882015 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:19:13 crc kubenswrapper[4840]: E1205 15:19:13.882488 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43119c92-a88a-4cf5-8ed5-0fb88578a642" containerName="ceilometer-notification-agent" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.882514 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="43119c92-a88a-4cf5-8ed5-0fb88578a642" containerName="ceilometer-notification-agent" Dec 05 15:19:13 crc kubenswrapper[4840]: E1205 15:19:13.882530 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43119c92-a88a-4cf5-8ed5-0fb88578a642" containerName="ceilometer-central-agent" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.882540 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="43119c92-a88a-4cf5-8ed5-0fb88578a642" containerName="ceilometer-central-agent" Dec 05 15:19:13 crc kubenswrapper[4840]: E1205 15:19:13.882575 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43119c92-a88a-4cf5-8ed5-0fb88578a642" containerName="proxy-httpd" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.882585 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="43119c92-a88a-4cf5-8ed5-0fb88578a642" 
containerName="proxy-httpd" Dec 05 15:19:13 crc kubenswrapper[4840]: E1205 15:19:13.882603 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43119c92-a88a-4cf5-8ed5-0fb88578a642" containerName="sg-core" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.882612 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="43119c92-a88a-4cf5-8ed5-0fb88578a642" containerName="sg-core" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.882894 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="43119c92-a88a-4cf5-8ed5-0fb88578a642" containerName="ceilometer-notification-agent" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.882917 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="43119c92-a88a-4cf5-8ed5-0fb88578a642" containerName="ceilometer-central-agent" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.882937 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="43119c92-a88a-4cf5-8ed5-0fb88578a642" containerName="sg-core" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.882953 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="43119c92-a88a-4cf5-8ed5-0fb88578a642" containerName="proxy-httpd" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.887671 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.899739 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.918969 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.919181 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 15:19:13 crc kubenswrapper[4840]: I1205 15:19:13.967180 4840 scope.go:117] "RemoveContainer" containerID="d20bfffd119f58c66ce71854f59c03009f8cb8ba8c34953b2e93abd73660d44b" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.079306 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43119c92-a88a-4cf5-8ed5-0fb88578a642" path="/var/lib/kubelet/pods/43119c92-a88a-4cf5-8ed5-0fb88578a642/volumes" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.134850 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-config-data\") pod \"ceilometer-0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " pod="openstack/ceilometer-0" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.134914 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-scripts\") pod \"ceilometer-0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " pod="openstack/ceilometer-0" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.134950 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-log-httpd\") pod \"ceilometer-0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " pod="openstack/ceilometer-0" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.134968 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " pod="openstack/ceilometer-0" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.135545 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-run-httpd\") pod \"ceilometer-0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " pod="openstack/ceilometer-0" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.135751 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " pod="openstack/ceilometer-0" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.135816 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcqf6\" (UniqueName: \"kubernetes.io/projected/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-kube-api-access-jcqf6\") pod \"ceilometer-0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " pod="openstack/ceilometer-0" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.238237 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " pod="openstack/ceilometer-0" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.238322 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcqf6\" (UniqueName: \"kubernetes.io/projected/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-kube-api-access-jcqf6\") pod \"ceilometer-0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " pod="openstack/ceilometer-0" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.238410 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-config-data\") pod \"ceilometer-0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " pod="openstack/ceilometer-0" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.238462 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-scripts\") pod \"ceilometer-0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " pod="openstack/ceilometer-0" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.238536 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-log-httpd\") pod \"ceilometer-0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " pod="openstack/ceilometer-0" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.238569 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " pod="openstack/ceilometer-0" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.239124 
4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-log-httpd\") pod \"ceilometer-0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " pod="openstack/ceilometer-0" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.239275 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-run-httpd\") pod \"ceilometer-0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " pod="openstack/ceilometer-0" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.239686 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-run-httpd\") pod \"ceilometer-0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " pod="openstack/ceilometer-0" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.243425 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-scripts\") pod \"ceilometer-0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " pod="openstack/ceilometer-0" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.245034 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-config-data\") pod \"ceilometer-0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " pod="openstack/ceilometer-0" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.246290 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " pod="openstack/ceilometer-0" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.248990 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " pod="openstack/ceilometer-0" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.256717 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcqf6\" (UniqueName: \"kubernetes.io/projected/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-kube-api-access-jcqf6\") pod \"ceilometer-0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " pod="openstack/ceilometer-0" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.548314 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.666721 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.766072 4840 generic.go:334] "Generic (PLEG): container finished" podID="58a6240b-dbe5-4208-b070-3b84e319505d" containerID="fceee12717220e6b3da59cf7d405bf04f847132c10974dc88ecd2285cec216d7" exitCode=137 Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.766130 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"58a6240b-dbe5-4208-b070-3b84e319505d","Type":"ContainerDied","Data":"fceee12717220e6b3da59cf7d405bf04f847132c10974dc88ecd2285cec216d7"} Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.766156 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"58a6240b-dbe5-4208-b070-3b84e319505d","Type":"ContainerDied","Data":"1ea439f28076dfee011a418a573809ac4dd164a81212247235ad85d05557fccc"} Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.766173 4840 scope.go:117] "RemoveContainer" containerID="fceee12717220e6b3da59cf7d405bf04f847132c10974dc88ecd2285cec216d7" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.766264 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.773738 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.794399 4840 scope.go:117] "RemoveContainer" containerID="8099338a7fddec2ff27849f9d27343b9597493a5797e08f6d86400dbfa213a47" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.830559 4840 scope.go:117] "RemoveContainer" containerID="fceee12717220e6b3da59cf7d405bf04f847132c10974dc88ecd2285cec216d7" Dec 05 15:19:14 crc kubenswrapper[4840]: E1205 15:19:14.838114 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fceee12717220e6b3da59cf7d405bf04f847132c10974dc88ecd2285cec216d7\": container with ID starting with fceee12717220e6b3da59cf7d405bf04f847132c10974dc88ecd2285cec216d7 not found: ID does not exist" containerID="fceee12717220e6b3da59cf7d405bf04f847132c10974dc88ecd2285cec216d7" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.838173 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fceee12717220e6b3da59cf7d405bf04f847132c10974dc88ecd2285cec216d7"} err="failed to get container status \"fceee12717220e6b3da59cf7d405bf04f847132c10974dc88ecd2285cec216d7\": rpc error: code = NotFound desc = could not find container \"fceee12717220e6b3da59cf7d405bf04f847132c10974dc88ecd2285cec216d7\": container with ID starting with fceee12717220e6b3da59cf7d405bf04f847132c10974dc88ecd2285cec216d7 not found: ID does not exist" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.838212 4840 scope.go:117] "RemoveContainer" containerID="8099338a7fddec2ff27849f9d27343b9597493a5797e08f6d86400dbfa213a47" Dec 05 15:19:14 crc kubenswrapper[4840]: E1205 15:19:14.839037 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8099338a7fddec2ff27849f9d27343b9597493a5797e08f6d86400dbfa213a47\": container with ID starting with 8099338a7fddec2ff27849f9d27343b9597493a5797e08f6d86400dbfa213a47 not found: ID does not exist" containerID="8099338a7fddec2ff27849f9d27343b9597493a5797e08f6d86400dbfa213a47" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 
15:19:14.839061 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8099338a7fddec2ff27849f9d27343b9597493a5797e08f6d86400dbfa213a47"} err="failed to get container status \"8099338a7fddec2ff27849f9d27343b9597493a5797e08f6d86400dbfa213a47\": rpc error: code = NotFound desc = could not find container \"8099338a7fddec2ff27849f9d27343b9597493a5797e08f6d86400dbfa213a47\": container with ID starting with 8099338a7fddec2ff27849f9d27343b9597493a5797e08f6d86400dbfa213a47 not found: ID does not exist" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.850594 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/58a6240b-dbe5-4208-b070-3b84e319505d-config-data-custom\") pod \"58a6240b-dbe5-4208-b070-3b84e319505d\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.850730 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58a6240b-dbe5-4208-b070-3b84e319505d-logs\") pod \"58a6240b-dbe5-4208-b070-3b84e319505d\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.850805 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58a6240b-dbe5-4208-b070-3b84e319505d-scripts\") pod \"58a6240b-dbe5-4208-b070-3b84e319505d\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.850831 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58a6240b-dbe5-4208-b070-3b84e319505d-config-data\") pod \"58a6240b-dbe5-4208-b070-3b84e319505d\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.850913 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bll58\" (UniqueName: \"kubernetes.io/projected/58a6240b-dbe5-4208-b070-3b84e319505d-kube-api-access-bll58\") pod \"58a6240b-dbe5-4208-b070-3b84e319505d\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.851021 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58a6240b-dbe5-4208-b070-3b84e319505d-combined-ca-bundle\") pod \"58a6240b-dbe5-4208-b070-3b84e319505d\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.851070 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/58a6240b-dbe5-4208-b070-3b84e319505d-etc-machine-id\") pod \"58a6240b-dbe5-4208-b070-3b84e319505d\" (UID: \"58a6240b-dbe5-4208-b070-3b84e319505d\") " Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.851259 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/58a6240b-dbe5-4208-b070-3b84e319505d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "58a6240b-dbe5-4208-b070-3b84e319505d" (UID: "58a6240b-dbe5-4208-b070-3b84e319505d"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.852398 4840 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/58a6240b-dbe5-4208-b070-3b84e319505d-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.853142 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58a6240b-dbe5-4208-b070-3b84e319505d-logs" (OuterVolumeSpecName: "logs") pod "58a6240b-dbe5-4208-b070-3b84e319505d" (UID: "58a6240b-dbe5-4208-b070-3b84e319505d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.856792 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58a6240b-dbe5-4208-b070-3b84e319505d-kube-api-access-bll58" (OuterVolumeSpecName: "kube-api-access-bll58") pod "58a6240b-dbe5-4208-b070-3b84e319505d" (UID: "58a6240b-dbe5-4208-b070-3b84e319505d"). InnerVolumeSpecName "kube-api-access-bll58". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.859087 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58a6240b-dbe5-4208-b070-3b84e319505d-scripts" (OuterVolumeSpecName: "scripts") pod "58a6240b-dbe5-4208-b070-3b84e319505d" (UID: "58a6240b-dbe5-4208-b070-3b84e319505d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.865086 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58a6240b-dbe5-4208-b070-3b84e319505d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "58a6240b-dbe5-4208-b070-3b84e319505d" (UID: "58a6240b-dbe5-4208-b070-3b84e319505d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.882042 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58a6240b-dbe5-4208-b070-3b84e319505d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "58a6240b-dbe5-4208-b070-3b84e319505d" (UID: "58a6240b-dbe5-4208-b070-3b84e319505d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:14 crc kubenswrapper[4840]: I1205 15:19:14.914059 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58a6240b-dbe5-4208-b070-3b84e319505d-config-data" (OuterVolumeSpecName: "config-data") pod "58a6240b-dbe5-4208-b070-3b84e319505d" (UID: "58a6240b-dbe5-4208-b070-3b84e319505d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:14.990783 4840 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58a6240b-dbe5-4208-b070-3b84e319505d-logs\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:14.990805 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58a6240b-dbe5-4208-b070-3b84e319505d-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:14.990816 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58a6240b-dbe5-4208-b070-3b84e319505d-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:14.990828 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bll58\" (UniqueName: \"kubernetes.io/projected/58a6240b-dbe5-4208-b070-3b84e319505d-kube-api-access-bll58\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:14.990836 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58a6240b-dbe5-4208-b070-3b84e319505d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:14.990843 4840 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/58a6240b-dbe5-4208-b070-3b84e319505d-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.037351 4840 scope.go:117] "RemoveContainer" containerID="c4d0d7b3d01beb333b25bbd2e9e21459eee897b9d9aa2426ad84d23d9fe58e34" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.079905 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.146183 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.154564 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.178613 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 15:19:15 crc kubenswrapper[4840]: E1205 15:19:15.179382 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58a6240b-dbe5-4208-b070-3b84e319505d" containerName="cinder-api" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.179524 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="58a6240b-dbe5-4208-b070-3b84e319505d" containerName="cinder-api" Dec 05 15:19:15 crc kubenswrapper[4840]: E1205 15:19:15.179615 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58a6240b-dbe5-4208-b070-3b84e319505d" containerName="cinder-api-log" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.179690 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="58a6240b-dbe5-4208-b070-3b84e319505d" containerName="cinder-api-log" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.179978 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="58a6240b-dbe5-4208-b070-3b84e319505d" containerName="cinder-api" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.180071 4840 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="58a6240b-dbe5-4208-b070-3b84e319505d" containerName="cinder-api-log" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.181351 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.185848 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.186025 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.186032 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.186928 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.276775 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.295463 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b2379ec8-f983-42df-9255-2a97b8589b6d-logs\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.295524 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b2379ec8-f983-42df-9255-2a97b8589b6d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.295722 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2379ec8-f983-42df-9255-2a97b8589b6d-public-tls-certs\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.295844 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2379ec8-f983-42df-9255-2a97b8589b6d-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.295957 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b2379ec8-f983-42df-9255-2a97b8589b6d-config-data-custom\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.296030 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b2379ec8-f983-42df-9255-2a97b8589b6d-scripts\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.296055 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ws4tk\" (UniqueName: 
\"kubernetes.io/projected/b2379ec8-f983-42df-9255-2a97b8589b6d-kube-api-access-ws4tk\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.296183 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2379ec8-f983-42df-9255-2a97b8589b6d-config-data\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.296206 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2379ec8-f983-42df-9255-2a97b8589b6d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.398674 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b2379ec8-f983-42df-9255-2a97b8589b6d-scripts\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.398721 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ws4tk\" (UniqueName: \"kubernetes.io/projected/b2379ec8-f983-42df-9255-2a97b8589b6d-kube-api-access-ws4tk\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.398782 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2379ec8-f983-42df-9255-2a97b8589b6d-config-data\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.398797 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2379ec8-f983-42df-9255-2a97b8589b6d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.398836 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b2379ec8-f983-42df-9255-2a97b8589b6d-logs\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.398881 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b2379ec8-f983-42df-9255-2a97b8589b6d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.398928 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2379ec8-f983-42df-9255-2a97b8589b6d-public-tls-certs\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.398966 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2379ec8-f983-42df-9255-2a97b8589b6d-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.399003 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b2379ec8-f983-42df-9255-2a97b8589b6d-config-data-custom\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.399061 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b2379ec8-f983-42df-9255-2a97b8589b6d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.400468 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b2379ec8-f983-42df-9255-2a97b8589b6d-logs\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.411274 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2379ec8-f983-42df-9255-2a97b8589b6d-public-tls-certs\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.411665 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2379ec8-f983-42df-9255-2a97b8589b6d-config-data\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.413663 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2379ec8-f983-42df-9255-2a97b8589b6d-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.413908 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b2379ec8-f983-42df-9255-2a97b8589b6d-scripts\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.415217 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2379ec8-f983-42df-9255-2a97b8589b6d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.417514 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b2379ec8-f983-42df-9255-2a97b8589b6d-config-data-custom\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.420903 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ws4tk\" (UniqueName: 
\"kubernetes.io/projected/b2379ec8-f983-42df-9255-2a97b8589b6d-kube-api-access-ws4tk\") pod \"cinder-api-0\" (UID: \"b2379ec8-f983-42df-9255-2a97b8589b6d\") " pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.496434 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 15:19:15 crc kubenswrapper[4840]: I1205 15:19:15.829303 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b9ce8593-a546-4c4f-b48d-6d2526aba0a0","Type":"ContainerStarted","Data":"a769a38fe75d5c6125a91e6bc7646101dc3944120140a374a123b5baf188b0d1"} Dec 05 15:19:16 crc kubenswrapper[4840]: I1205 15:19:16.075347 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58a6240b-dbe5-4208-b070-3b84e319505d" path="/var/lib/kubelet/pods/58a6240b-dbe5-4208-b070-3b84e319505d/volumes" Dec 05 15:19:16 crc kubenswrapper[4840]: I1205 15:19:16.205756 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 15:19:16 crc kubenswrapper[4840]: I1205 15:19:16.850308 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b9ce8593-a546-4c4f-b48d-6d2526aba0a0","Type":"ContainerStarted","Data":"4011a702e843de0022396e7e6fb4bef52c65274e056de06d8bdf9c26d2264f50"} Dec 05 15:19:16 crc kubenswrapper[4840]: I1205 15:19:16.854512 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b2379ec8-f983-42df-9255-2a97b8589b6d","Type":"ContainerStarted","Data":"29f7223d3b9c6df39ba24eba363f8ec693de241c410c4d73948a0590e0d6ca95"} Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.381046 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-gtmvq"] Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.382635 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-gtmvq" Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.394683 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-gtmvq"] Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.417918 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.418256 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="231dd866-8fc7-4d81-a391-8cfc74561bd6" containerName="glance-log" containerID="cri-o://7caadfdbb2233ff8d9af4ed20fed744814ac639a14bb366f509bc793300360b8" gracePeriod=30 Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.418462 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="231dd866-8fc7-4d81-a391-8cfc74561bd6" containerName="glance-httpd" containerID="cri-o://f7c846a940e475e325e21699b3a1056c61b5233111e0b0b7c113450ca39dae31" gracePeriod=30 Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.431572 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f53be4d9-c650-4344-8a6f-79258ea44a6c-operator-scripts\") pod \"nova-api-db-create-gtmvq\" (UID: \"f53be4d9-c650-4344-8a6f-79258ea44a6c\") " pod="openstack/nova-api-db-create-gtmvq" Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.431704 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7d26\" (UniqueName: \"kubernetes.io/projected/f53be4d9-c650-4344-8a6f-79258ea44a6c-kube-api-access-f7d26\") pod \"nova-api-db-create-gtmvq\" (UID: \"f53be4d9-c650-4344-8a6f-79258ea44a6c\") " pod="openstack/nova-api-db-create-gtmvq" Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.477876 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-m2tqf"] Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.479172 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-m2tqf" Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.760456 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7d26\" (UniqueName: \"kubernetes.io/projected/f53be4d9-c650-4344-8a6f-79258ea44a6c-kube-api-access-f7d26\") pod \"nova-api-db-create-gtmvq\" (UID: \"f53be4d9-c650-4344-8a6f-79258ea44a6c\") " pod="openstack/nova-api-db-create-gtmvq" Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.760588 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpcf8\" (UniqueName: \"kubernetes.io/projected/72d018e0-5ef5-45a1-914e-e733965b1089-kube-api-access-tpcf8\") pod \"nova-cell0-db-create-m2tqf\" (UID: \"72d018e0-5ef5-45a1-914e-e733965b1089\") " pod="openstack/nova-cell0-db-create-m2tqf" Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.760628 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72d018e0-5ef5-45a1-914e-e733965b1089-operator-scripts\") pod \"nova-cell0-db-create-m2tqf\" (UID: \"72d018e0-5ef5-45a1-914e-e733965b1089\") " pod="openstack/nova-cell0-db-create-m2tqf" Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.760661 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f53be4d9-c650-4344-8a6f-79258ea44a6c-operator-scripts\") pod \"nova-api-db-create-gtmvq\" (UID: \"f53be4d9-c650-4344-8a6f-79258ea44a6c\") " pod="openstack/nova-api-db-create-gtmvq" Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.762342 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f53be4d9-c650-4344-8a6f-79258ea44a6c-operator-scripts\") pod \"nova-api-db-create-gtmvq\" (UID: \"f53be4d9-c650-4344-8a6f-79258ea44a6c\") " pod="openstack/nova-api-db-create-gtmvq" Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.782540 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-m2tqf"] Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.793069 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-09f2-account-create-update-m5w9w"] Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.813210 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-09f2-account-create-update-m5w9w"] Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.813314 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-09f2-account-create-update-m5w9w" Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.819391 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.847628 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7d26\" (UniqueName: \"kubernetes.io/projected/f53be4d9-c650-4344-8a6f-79258ea44a6c-kube-api-access-f7d26\") pod \"nova-api-db-create-gtmvq\" (UID: \"f53be4d9-c650-4344-8a6f-79258ea44a6c\") " pod="openstack/nova-api-db-create-gtmvq" Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.865496 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpcf8\" (UniqueName: \"kubernetes.io/projected/72d018e0-5ef5-45a1-914e-e733965b1089-kube-api-access-tpcf8\") pod \"nova-cell0-db-create-m2tqf\" (UID: \"72d018e0-5ef5-45a1-914e-e733965b1089\") " pod="openstack/nova-cell0-db-create-m2tqf" Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.865572 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72d018e0-5ef5-45a1-914e-e733965b1089-operator-scripts\") pod \"nova-cell0-db-create-m2tqf\" (UID: \"72d018e0-5ef5-45a1-914e-e733965b1089\") " pod="openstack/nova-cell0-db-create-m2tqf" Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.865603 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xtgl\" (UniqueName: \"kubernetes.io/projected/b1e31d97-cf03-4d2a-abd2-14cf20e60d90-kube-api-access-4xtgl\") pod \"nova-api-09f2-account-create-update-m5w9w\" (UID: \"b1e31d97-cf03-4d2a-abd2-14cf20e60d90\") " pod="openstack/nova-api-09f2-account-create-update-m5w9w" Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.865691 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1e31d97-cf03-4d2a-abd2-14cf20e60d90-operator-scripts\") pod \"nova-api-09f2-account-create-update-m5w9w\" (UID: \"b1e31d97-cf03-4d2a-abd2-14cf20e60d90\") " pod="openstack/nova-api-09f2-account-create-update-m5w9w" Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.869078 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72d018e0-5ef5-45a1-914e-e733965b1089-operator-scripts\") pod \"nova-cell0-db-create-m2tqf\" (UID: \"72d018e0-5ef5-45a1-914e-e733965b1089\") " pod="openstack/nova-cell0-db-create-m2tqf" Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.929140 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-j458g"] Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.931093 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-j458g" Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.932120 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b9ce8593-a546-4c4f-b48d-6d2526aba0a0","Type":"ContainerStarted","Data":"f3dd9e49685d2cd8747b189efb5c92a2ba00b8da01f4f89472db135bb79fbb74"} Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.941103 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b2379ec8-f983-42df-9255-2a97b8589b6d","Type":"ContainerStarted","Data":"5e40a32142cb70ca50da980d9606b6fbcc7b8cd61db40232b41fb92e35f907b7"} Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.946327 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-47ef-account-create-update-9v4gn"] Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.948805 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-47ef-account-create-update-9v4gn" Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.952163 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.952568 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpcf8\" (UniqueName: \"kubernetes.io/projected/72d018e0-5ef5-45a1-914e-e733965b1089-kube-api-access-tpcf8\") pod \"nova-cell0-db-create-m2tqf\" (UID: \"72d018e0-5ef5-45a1-914e-e733965b1089\") " pod="openstack/nova-cell0-db-create-m2tqf" Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.956459 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-j458g"] Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.957007 4840 generic.go:334] "Generic (PLEG): container finished" podID="231dd866-8fc7-4d81-a391-8cfc74561bd6" containerID="7caadfdbb2233ff8d9af4ed20fed744814ac639a14bb366f509bc793300360b8" exitCode=143 Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.957226 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"231dd866-8fc7-4d81-a391-8cfc74561bd6","Type":"ContainerDied","Data":"7caadfdbb2233ff8d9af4ed20fed744814ac639a14bb366f509bc793300360b8"} Dec 05 15:19:17 crc kubenswrapper[4840]: I1205 15:19:17.981093 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-47ef-account-create-update-9v4gn"] Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.014294 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b6a2385-87c2-4077-a39a-440e4e3d3e51-operator-scripts\") pod \"nova-cell0-47ef-account-create-update-9v4gn\" (UID: \"8b6a2385-87c2-4077-a39a-440e4e3d3e51\") " pod="openstack/nova-cell0-47ef-account-create-update-9v4gn" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.014513 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9w8n\" (UniqueName: \"kubernetes.io/projected/48ad6b4f-c38d-40f9-9348-1af570cb7f35-kube-api-access-h9w8n\") pod \"nova-cell1-db-create-j458g\" (UID: \"48ad6b4f-c38d-40f9-9348-1af570cb7f35\") " pod="openstack/nova-cell1-db-create-j458g" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.014653 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" 
(UniqueName: \"kubernetes.io/configmap/48ad6b4f-c38d-40f9-9348-1af570cb7f35-operator-scripts\") pod \"nova-cell1-db-create-j458g\" (UID: \"48ad6b4f-c38d-40f9-9348-1af570cb7f35\") " pod="openstack/nova-cell1-db-create-j458g" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.014724 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xtgl\" (UniqueName: \"kubernetes.io/projected/b1e31d97-cf03-4d2a-abd2-14cf20e60d90-kube-api-access-4xtgl\") pod \"nova-api-09f2-account-create-update-m5w9w\" (UID: \"b1e31d97-cf03-4d2a-abd2-14cf20e60d90\") " pod="openstack/nova-api-09f2-account-create-update-m5w9w" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.014930 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xqql\" (UniqueName: \"kubernetes.io/projected/8b6a2385-87c2-4077-a39a-440e4e3d3e51-kube-api-access-2xqql\") pod \"nova-cell0-47ef-account-create-update-9v4gn\" (UID: \"8b6a2385-87c2-4077-a39a-440e4e3d3e51\") " pod="openstack/nova-cell0-47ef-account-create-update-9v4gn" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.015098 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1e31d97-cf03-4d2a-abd2-14cf20e60d90-operator-scripts\") pod \"nova-api-09f2-account-create-update-m5w9w\" (UID: \"b1e31d97-cf03-4d2a-abd2-14cf20e60d90\") " pod="openstack/nova-api-09f2-account-create-update-m5w9w" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.017722 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1e31d97-cf03-4d2a-abd2-14cf20e60d90-operator-scripts\") pod \"nova-api-09f2-account-create-update-m5w9w\" (UID: \"b1e31d97-cf03-4d2a-abd2-14cf20e60d90\") " pod="openstack/nova-api-09f2-account-create-update-m5w9w" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.034282 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-gtmvq" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.055946 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xtgl\" (UniqueName: \"kubernetes.io/projected/b1e31d97-cf03-4d2a-abd2-14cf20e60d90-kube-api-access-4xtgl\") pod \"nova-api-09f2-account-create-update-m5w9w\" (UID: \"b1e31d97-cf03-4d2a-abd2-14cf20e60d90\") " pod="openstack/nova-api-09f2-account-create-update-m5w9w" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.123264 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9w8n\" (UniqueName: \"kubernetes.io/projected/48ad6b4f-c38d-40f9-9348-1af570cb7f35-kube-api-access-h9w8n\") pod \"nova-cell1-db-create-j458g\" (UID: \"48ad6b4f-c38d-40f9-9348-1af570cb7f35\") " pod="openstack/nova-cell1-db-create-j458g" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.123316 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/48ad6b4f-c38d-40f9-9348-1af570cb7f35-operator-scripts\") pod \"nova-cell1-db-create-j458g\" (UID: \"48ad6b4f-c38d-40f9-9348-1af570cb7f35\") " pod="openstack/nova-cell1-db-create-j458g" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.123375 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xqql\" (UniqueName: \"kubernetes.io/projected/8b6a2385-87c2-4077-a39a-440e4e3d3e51-kube-api-access-2xqql\") pod \"nova-cell0-47ef-account-create-update-9v4gn\" (UID: \"8b6a2385-87c2-4077-a39a-440e4e3d3e51\") " pod="openstack/nova-cell0-47ef-account-create-update-9v4gn" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.123518 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b6a2385-87c2-4077-a39a-440e4e3d3e51-operator-scripts\") pod \"nova-cell0-47ef-account-create-update-9v4gn\" (UID: \"8b6a2385-87c2-4077-a39a-440e4e3d3e51\") " pod="openstack/nova-cell0-47ef-account-create-update-9v4gn" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.124596 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b6a2385-87c2-4077-a39a-440e4e3d3e51-operator-scripts\") pod \"nova-cell0-47ef-account-create-update-9v4gn\" (UID: \"8b6a2385-87c2-4077-a39a-440e4e3d3e51\") " pod="openstack/nova-cell0-47ef-account-create-update-9v4gn" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.125594 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/48ad6b4f-c38d-40f9-9348-1af570cb7f35-operator-scripts\") pod \"nova-cell1-db-create-j458g\" (UID: \"48ad6b4f-c38d-40f9-9348-1af570cb7f35\") " pod="openstack/nova-cell1-db-create-j458g" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.136503 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-1cfe-account-create-update-g6pb6"] Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.137972 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-1cfe-account-create-update-g6pb6" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.143088 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.145048 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9w8n\" (UniqueName: \"kubernetes.io/projected/48ad6b4f-c38d-40f9-9348-1af570cb7f35-kube-api-access-h9w8n\") pod \"nova-cell1-db-create-j458g\" (UID: \"48ad6b4f-c38d-40f9-9348-1af570cb7f35\") " pod="openstack/nova-cell1-db-create-j458g" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.150501 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xqql\" (UniqueName: \"kubernetes.io/projected/8b6a2385-87c2-4077-a39a-440e4e3d3e51-kube-api-access-2xqql\") pod \"nova-cell0-47ef-account-create-update-9v4gn\" (UID: \"8b6a2385-87c2-4077-a39a-440e4e3d3e51\") " pod="openstack/nova-cell0-47ef-account-create-update-9v4gn" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.151211 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-1cfe-account-create-update-g6pb6"] Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.230049 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/07957586-8bfc-4935-a092-2245ac49771a-operator-scripts\") pod \"nova-cell1-1cfe-account-create-update-g6pb6\" (UID: \"07957586-8bfc-4935-a092-2245ac49771a\") " pod="openstack/nova-cell1-1cfe-account-create-update-g6pb6" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.230450 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pspjc\" (UniqueName: \"kubernetes.io/projected/07957586-8bfc-4935-a092-2245ac49771a-kube-api-access-pspjc\") pod \"nova-cell1-1cfe-account-create-update-g6pb6\" (UID: \"07957586-8bfc-4935-a092-2245ac49771a\") " pod="openstack/nova-cell1-1cfe-account-create-update-g6pb6" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.245823 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-m2tqf" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.316807 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-09f2-account-create-update-m5w9w" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.327217 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-j458g" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.331650 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pspjc\" (UniqueName: \"kubernetes.io/projected/07957586-8bfc-4935-a092-2245ac49771a-kube-api-access-pspjc\") pod \"nova-cell1-1cfe-account-create-update-g6pb6\" (UID: \"07957586-8bfc-4935-a092-2245ac49771a\") " pod="openstack/nova-cell1-1cfe-account-create-update-g6pb6" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.331716 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/07957586-8bfc-4935-a092-2245ac49771a-operator-scripts\") pod \"nova-cell1-1cfe-account-create-update-g6pb6\" (UID: \"07957586-8bfc-4935-a092-2245ac49771a\") " pod="openstack/nova-cell1-1cfe-account-create-update-g6pb6" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.332852 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/07957586-8bfc-4935-a092-2245ac49771a-operator-scripts\") pod \"nova-cell1-1cfe-account-create-update-g6pb6\" (UID: \"07957586-8bfc-4935-a092-2245ac49771a\") " pod="openstack/nova-cell1-1cfe-account-create-update-g6pb6" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.349799 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pspjc\" (UniqueName: \"kubernetes.io/projected/07957586-8bfc-4935-a092-2245ac49771a-kube-api-access-pspjc\") pod \"nova-cell1-1cfe-account-create-update-g6pb6\" (UID: \"07957586-8bfc-4935-a092-2245ac49771a\") " pod="openstack/nova-cell1-1cfe-account-create-update-g6pb6" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.390022 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-47ef-account-create-update-9v4gn" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.461834 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-1cfe-account-create-update-g6pb6" Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.606960 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-gtmvq"] Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.858636 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-m2tqf"] Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.979391 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b9ce8593-a546-4c4f-b48d-6d2526aba0a0","Type":"ContainerStarted","Data":"c0cec1b9e7d7167b66e4b4b564af10e078a1f469b9f6cd7703c40983368f29c1"} Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.984977 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-gtmvq" event={"ID":"f53be4d9-c650-4344-8a6f-79258ea44a6c","Type":"ContainerStarted","Data":"7b4f0d0c0c1e46f4196815c07d8a0d09bc799e18e016c36e09d921a37880cde8"} Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.996093 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b2379ec8-f983-42df-9255-2a97b8589b6d","Type":"ContainerStarted","Data":"78766859a17a6362b9ceda4cc082f4d20ca90774a120426ed3704d18b7c06e14"} Dec 05 15:19:18 crc kubenswrapper[4840]: I1205 15:19:18.997403 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 05 15:19:19 crc kubenswrapper[4840]: I1205 15:19:19.005323 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-m2tqf" event={"ID":"72d018e0-5ef5-45a1-914e-e733965b1089","Type":"ContainerStarted","Data":"49f017a2739edc9b73de12a89501c99c4a8a0ad33175f16be5452d53f835aa2a"} Dec 05 15:19:19 crc kubenswrapper[4840]: I1205 15:19:19.136471 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.136448813 podStartE2EDuration="4.136448813s" podCreationTimestamp="2025-12-05 15:19:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:19:19.034968607 +0000 UTC m=+1237.376031241" watchObservedRunningTime="2025-12-05 15:19:19.136448813 +0000 UTC m=+1237.477511427" Dec 05 15:19:19 crc kubenswrapper[4840]: I1205 15:19:19.148387 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-j458g"] Dec 05 15:19:19 crc kubenswrapper[4840]: I1205 15:19:19.320789 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-09f2-account-create-update-m5w9w"] Dec 05 15:19:19 crc kubenswrapper[4840]: I1205 15:19:19.407471 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-47ef-account-create-update-9v4gn"] Dec 05 15:19:19 crc kubenswrapper[4840]: I1205 15:19:19.429499 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-1cfe-account-create-update-g6pb6"] Dec 05 15:19:19 crc kubenswrapper[4840]: W1205 15:19:19.446031 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod07957586_8bfc_4935_a092_2245ac49771a.slice/crio-e5574c4f63e9f628476cdd821af5e89332da72ac5f7288363fe504ec2771bd69 WatchSource:0}: Error finding container e5574c4f63e9f628476cdd821af5e89332da72ac5f7288363fe504ec2771bd69: Status 404 returned error can't find the container with id 
e5574c4f63e9f628476cdd821af5e89332da72ac5f7288363fe504ec2771bd69 Dec 05 15:19:20 crc kubenswrapper[4840]: I1205 15:19:20.024059 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-1cfe-account-create-update-g6pb6" event={"ID":"07957586-8bfc-4935-a092-2245ac49771a","Type":"ContainerStarted","Data":"ff4cd977d2839608225ad43df4395d5880a4d6a4dcce5f35f885b3a0dffe65d9"} Dec 05 15:19:20 crc kubenswrapper[4840]: I1205 15:19:20.024423 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-1cfe-account-create-update-g6pb6" event={"ID":"07957586-8bfc-4935-a092-2245ac49771a","Type":"ContainerStarted","Data":"e5574c4f63e9f628476cdd821af5e89332da72ac5f7288363fe504ec2771bd69"} Dec 05 15:19:20 crc kubenswrapper[4840]: I1205 15:19:20.032790 4840 generic.go:334] "Generic (PLEG): container finished" podID="48ad6b4f-c38d-40f9-9348-1af570cb7f35" containerID="acd03ef676da987ff3d92787514e19ff67950047796874abb4684f85420b1603" exitCode=0 Dec 05 15:19:20 crc kubenswrapper[4840]: I1205 15:19:20.032851 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-j458g" event={"ID":"48ad6b4f-c38d-40f9-9348-1af570cb7f35","Type":"ContainerDied","Data":"acd03ef676da987ff3d92787514e19ff67950047796874abb4684f85420b1603"} Dec 05 15:19:20 crc kubenswrapper[4840]: I1205 15:19:20.032895 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-j458g" event={"ID":"48ad6b4f-c38d-40f9-9348-1af570cb7f35","Type":"ContainerStarted","Data":"22943cb6dd5cf7b72c6f47272403f24f49ac15de0a23f8ebee479d6ddc2b8c2c"} Dec 05 15:19:20 crc kubenswrapper[4840]: I1205 15:19:20.037843 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-47ef-account-create-update-9v4gn" event={"ID":"8b6a2385-87c2-4077-a39a-440e4e3d3e51","Type":"ContainerStarted","Data":"f8ec05afa270f81e987ca39d111711c944f93906790227bfb7847599ae2adb0d"} Dec 05 15:19:20 crc kubenswrapper[4840]: I1205 15:19:20.037894 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-47ef-account-create-update-9v4gn" event={"ID":"8b6a2385-87c2-4077-a39a-440e4e3d3e51","Type":"ContainerStarted","Data":"f6aad75ce68d84323914d780b00dea4628a6a8389db03629ccb63422bdebce3a"} Dec 05 15:19:20 crc kubenswrapper[4840]: I1205 15:19:20.039892 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-09f2-account-create-update-m5w9w" event={"ID":"b1e31d97-cf03-4d2a-abd2-14cf20e60d90","Type":"ContainerStarted","Data":"25a7b4eb05f2ded88ad31c3bea9c2da18a7d3f6c2966a7aca75748f67e0440bf"} Dec 05 15:19:20 crc kubenswrapper[4840]: I1205 15:19:20.039929 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-09f2-account-create-update-m5w9w" event={"ID":"b1e31d97-cf03-4d2a-abd2-14cf20e60d90","Type":"ContainerStarted","Data":"99f6fef8946e55a9b2c0ed48c51bc2da883d837321a527c844682c86baeb1937"} Dec 05 15:19:20 crc kubenswrapper[4840]: I1205 15:19:20.042584 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-1cfe-account-create-update-g6pb6" podStartSLOduration=2.042568701 podStartE2EDuration="2.042568701s" podCreationTimestamp="2025-12-05 15:19:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:19:20.041726837 +0000 UTC m=+1238.382789451" watchObservedRunningTime="2025-12-05 15:19:20.042568701 +0000 UTC m=+1238.383631315" Dec 05 15:19:20 crc kubenswrapper[4840]: I1205 
15:19:20.045373 4840 generic.go:334] "Generic (PLEG): container finished" podID="72d018e0-5ef5-45a1-914e-e733965b1089" containerID="028c7689d356b5f46f9b8d1571b365c89b6e769ef46ba15e3f792e7163c005ff" exitCode=0 Dec 05 15:19:20 crc kubenswrapper[4840]: I1205 15:19:20.045478 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-m2tqf" event={"ID":"72d018e0-5ef5-45a1-914e-e733965b1089","Type":"ContainerDied","Data":"028c7689d356b5f46f9b8d1571b365c89b6e769ef46ba15e3f792e7163c005ff"} Dec 05 15:19:20 crc kubenswrapper[4840]: I1205 15:19:20.047347 4840 generic.go:334] "Generic (PLEG): container finished" podID="f53be4d9-c650-4344-8a6f-79258ea44a6c" containerID="33d80cef45fe10e356f62de1e37db8ebc8fc9842eaeaa667280c7df351bd4787" exitCode=0 Dec 05 15:19:20 crc kubenswrapper[4840]: I1205 15:19:20.047391 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-gtmvq" event={"ID":"f53be4d9-c650-4344-8a6f-79258ea44a6c","Type":"ContainerDied","Data":"33d80cef45fe10e356f62de1e37db8ebc8fc9842eaeaa667280c7df351bd4787"} Dec 05 15:19:20 crc kubenswrapper[4840]: I1205 15:19:20.069556 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-47ef-account-create-update-9v4gn" podStartSLOduration=3.06953489 podStartE2EDuration="3.06953489s" podCreationTimestamp="2025-12-05 15:19:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:19:20.063545792 +0000 UTC m=+1238.404608416" watchObservedRunningTime="2025-12-05 15:19:20.06953489 +0000 UTC m=+1238.410597504" Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.062815 4840 generic.go:334] "Generic (PLEG): container finished" podID="b1e31d97-cf03-4d2a-abd2-14cf20e60d90" containerID="25a7b4eb05f2ded88ad31c3bea9c2da18a7d3f6c2966a7aca75748f67e0440bf" exitCode=0 Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.063714 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-09f2-account-create-update-m5w9w" event={"ID":"b1e31d97-cf03-4d2a-abd2-14cf20e60d90","Type":"ContainerDied","Data":"25a7b4eb05f2ded88ad31c3bea9c2da18a7d3f6c2966a7aca75748f67e0440bf"} Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.067886 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b9ce8593-a546-4c4f-b48d-6d2526aba0a0","Type":"ContainerStarted","Data":"95c6ff8b65b51d381888a7582790b4eadbb627c64b6601422277e3026f21f38e"} Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.068033 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b9ce8593-a546-4c4f-b48d-6d2526aba0a0" containerName="ceilometer-central-agent" containerID="cri-o://4011a702e843de0022396e7e6fb4bef52c65274e056de06d8bdf9c26d2264f50" gracePeriod=30 Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.068261 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b9ce8593-a546-4c4f-b48d-6d2526aba0a0" containerName="proxy-httpd" containerID="cri-o://95c6ff8b65b51d381888a7582790b4eadbb627c64b6601422277e3026f21f38e" gracePeriod=30 Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.068300 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b9ce8593-a546-4c4f-b48d-6d2526aba0a0" containerName="ceilometer-notification-agent" 
containerID="cri-o://f3dd9e49685d2cd8747b189efb5c92a2ba00b8da01f4f89472db135bb79fbb74" gracePeriod=30 Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.068357 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.068312 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b9ce8593-a546-4c4f-b48d-6d2526aba0a0" containerName="sg-core" containerID="cri-o://c0cec1b9e7d7167b66e4b4b564af10e078a1f469b9f6cd7703c40983368f29c1" gracePeriod=30 Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.076814 4840 generic.go:334] "Generic (PLEG): container finished" podID="07957586-8bfc-4935-a092-2245ac49771a" containerID="ff4cd977d2839608225ad43df4395d5880a4d6a4dcce5f35f885b3a0dffe65d9" exitCode=0 Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.077011 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-1cfe-account-create-update-g6pb6" event={"ID":"07957586-8bfc-4935-a092-2245ac49771a","Type":"ContainerDied","Data":"ff4cd977d2839608225ad43df4395d5880a4d6a4dcce5f35f885b3a0dffe65d9"} Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.083597 4840 generic.go:334] "Generic (PLEG): container finished" podID="8b6a2385-87c2-4077-a39a-440e4e3d3e51" containerID="f8ec05afa270f81e987ca39d111711c944f93906790227bfb7847599ae2adb0d" exitCode=0 Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.083655 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-47ef-account-create-update-9v4gn" event={"ID":"8b6a2385-87c2-4077-a39a-440e4e3d3e51","Type":"ContainerDied","Data":"f8ec05afa270f81e987ca39d111711c944f93906790227bfb7847599ae2adb0d"} Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.097782 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.501841628 podStartE2EDuration="8.097761926s" podCreationTimestamp="2025-12-05 15:19:13 +0000 UTC" firstStartedPulling="2025-12-05 15:19:15.094723316 +0000 UTC m=+1233.435785930" lastFinishedPulling="2025-12-05 15:19:19.690643624 +0000 UTC m=+1238.031706228" observedRunningTime="2025-12-05 15:19:21.093188847 +0000 UTC m=+1239.434251471" watchObservedRunningTime="2025-12-05 15:19:21.097761926 +0000 UTC m=+1239.438824540" Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.579686 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-09f2-account-create-update-m5w9w" Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.590992 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-gtmvq" Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.599680 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-m2tqf" Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.607529 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-j458g" Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.620261 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f53be4d9-c650-4344-8a6f-79258ea44a6c-operator-scripts\") pod \"f53be4d9-c650-4344-8a6f-79258ea44a6c\" (UID: \"f53be4d9-c650-4344-8a6f-79258ea44a6c\") " Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.620315 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/48ad6b4f-c38d-40f9-9348-1af570cb7f35-operator-scripts\") pod \"48ad6b4f-c38d-40f9-9348-1af570cb7f35\" (UID: \"48ad6b4f-c38d-40f9-9348-1af570cb7f35\") " Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.620376 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f7d26\" (UniqueName: \"kubernetes.io/projected/f53be4d9-c650-4344-8a6f-79258ea44a6c-kube-api-access-f7d26\") pod \"f53be4d9-c650-4344-8a6f-79258ea44a6c\" (UID: \"f53be4d9-c650-4344-8a6f-79258ea44a6c\") " Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.620407 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72d018e0-5ef5-45a1-914e-e733965b1089-operator-scripts\") pod \"72d018e0-5ef5-45a1-914e-e733965b1089\" (UID: \"72d018e0-5ef5-45a1-914e-e733965b1089\") " Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.620447 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1e31d97-cf03-4d2a-abd2-14cf20e60d90-operator-scripts\") pod \"b1e31d97-cf03-4d2a-abd2-14cf20e60d90\" (UID: \"b1e31d97-cf03-4d2a-abd2-14cf20e60d90\") " Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.620478 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h9w8n\" (UniqueName: \"kubernetes.io/projected/48ad6b4f-c38d-40f9-9348-1af570cb7f35-kube-api-access-h9w8n\") pod \"48ad6b4f-c38d-40f9-9348-1af570cb7f35\" (UID: \"48ad6b4f-c38d-40f9-9348-1af570cb7f35\") " Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.620551 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4xtgl\" (UniqueName: \"kubernetes.io/projected/b1e31d97-cf03-4d2a-abd2-14cf20e60d90-kube-api-access-4xtgl\") pod \"b1e31d97-cf03-4d2a-abd2-14cf20e60d90\" (UID: \"b1e31d97-cf03-4d2a-abd2-14cf20e60d90\") " Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.620574 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tpcf8\" (UniqueName: \"kubernetes.io/projected/72d018e0-5ef5-45a1-914e-e733965b1089-kube-api-access-tpcf8\") pod \"72d018e0-5ef5-45a1-914e-e733965b1089\" (UID: \"72d018e0-5ef5-45a1-914e-e733965b1089\") " Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.622654 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72d018e0-5ef5-45a1-914e-e733965b1089-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "72d018e0-5ef5-45a1-914e-e733965b1089" (UID: "72d018e0-5ef5-45a1-914e-e733965b1089"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.623574 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f53be4d9-c650-4344-8a6f-79258ea44a6c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f53be4d9-c650-4344-8a6f-79258ea44a6c" (UID: "f53be4d9-c650-4344-8a6f-79258ea44a6c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.624187 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48ad6b4f-c38d-40f9-9348-1af570cb7f35-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "48ad6b4f-c38d-40f9-9348-1af570cb7f35" (UID: "48ad6b4f-c38d-40f9-9348-1af570cb7f35"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.624836 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1e31d97-cf03-4d2a-abd2-14cf20e60d90-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b1e31d97-cf03-4d2a-abd2-14cf20e60d90" (UID: "b1e31d97-cf03-4d2a-abd2-14cf20e60d90"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.629561 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72d018e0-5ef5-45a1-914e-e733965b1089-kube-api-access-tpcf8" (OuterVolumeSpecName: "kube-api-access-tpcf8") pod "72d018e0-5ef5-45a1-914e-e733965b1089" (UID: "72d018e0-5ef5-45a1-914e-e733965b1089"). InnerVolumeSpecName "kube-api-access-tpcf8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.642348 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f53be4d9-c650-4344-8a6f-79258ea44a6c-kube-api-access-f7d26" (OuterVolumeSpecName: "kube-api-access-f7d26") pod "f53be4d9-c650-4344-8a6f-79258ea44a6c" (UID: "f53be4d9-c650-4344-8a6f-79258ea44a6c"). InnerVolumeSpecName "kube-api-access-f7d26". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.642410 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48ad6b4f-c38d-40f9-9348-1af570cb7f35-kube-api-access-h9w8n" (OuterVolumeSpecName: "kube-api-access-h9w8n") pod "48ad6b4f-c38d-40f9-9348-1af570cb7f35" (UID: "48ad6b4f-c38d-40f9-9348-1af570cb7f35"). InnerVolumeSpecName "kube-api-access-h9w8n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.642492 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1e31d97-cf03-4d2a-abd2-14cf20e60d90-kube-api-access-4xtgl" (OuterVolumeSpecName: "kube-api-access-4xtgl") pod "b1e31d97-cf03-4d2a-abd2-14cf20e60d90" (UID: "b1e31d97-cf03-4d2a-abd2-14cf20e60d90"). InnerVolumeSpecName "kube-api-access-4xtgl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.723118 4840 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f53be4d9-c650-4344-8a6f-79258ea44a6c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.723151 4840 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/48ad6b4f-c38d-40f9-9348-1af570cb7f35-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.723161 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f7d26\" (UniqueName: \"kubernetes.io/projected/f53be4d9-c650-4344-8a6f-79258ea44a6c-kube-api-access-f7d26\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.723172 4840 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72d018e0-5ef5-45a1-914e-e733965b1089-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.723181 4840 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1e31d97-cf03-4d2a-abd2-14cf20e60d90-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.723189 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h9w8n\" (UniqueName: \"kubernetes.io/projected/48ad6b4f-c38d-40f9-9348-1af570cb7f35-kube-api-access-h9w8n\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.723198 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4xtgl\" (UniqueName: \"kubernetes.io/projected/b1e31d97-cf03-4d2a-abd2-14cf20e60d90-kube-api-access-4xtgl\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.723207 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tpcf8\" (UniqueName: \"kubernetes.io/projected/72d018e0-5ef5-45a1-914e-e733965b1089-kube-api-access-tpcf8\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.979558 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:21 crc kubenswrapper[4840]: I1205 15:19:21.982209 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-74d87df5bc-m4vp7" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.106020 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-j458g" event={"ID":"48ad6b4f-c38d-40f9-9348-1af570cb7f35","Type":"ContainerDied","Data":"22943cb6dd5cf7b72c6f47272403f24f49ac15de0a23f8ebee479d6ddc2b8c2c"} Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.106365 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22943cb6dd5cf7b72c6f47272403f24f49ac15de0a23f8ebee479d6ddc2b8c2c" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.106475 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-j458g" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.112864 4840 generic.go:334] "Generic (PLEG): container finished" podID="231dd866-8fc7-4d81-a391-8cfc74561bd6" containerID="f7c846a940e475e325e21699b3a1056c61b5233111e0b0b7c113450ca39dae31" exitCode=0 Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.112917 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"231dd866-8fc7-4d81-a391-8cfc74561bd6","Type":"ContainerDied","Data":"f7c846a940e475e325e21699b3a1056c61b5233111e0b0b7c113450ca39dae31"} Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.115777 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-09f2-account-create-update-m5w9w" event={"ID":"b1e31d97-cf03-4d2a-abd2-14cf20e60d90","Type":"ContainerDied","Data":"99f6fef8946e55a9b2c0ed48c51bc2da883d837321a527c844682c86baeb1937"} Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.115812 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="99f6fef8946e55a9b2c0ed48c51bc2da883d837321a527c844682c86baeb1937" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.115920 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-09f2-account-create-update-m5w9w" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.120290 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-m2tqf" event={"ID":"72d018e0-5ef5-45a1-914e-e733965b1089","Type":"ContainerDied","Data":"49f017a2739edc9b73de12a89501c99c4a8a0ad33175f16be5452d53f835aa2a"} Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.120330 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49f017a2739edc9b73de12a89501c99c4a8a0ad33175f16be5452d53f835aa2a" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.120408 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-m2tqf" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.126216 4840 generic.go:334] "Generic (PLEG): container finished" podID="b9ce8593-a546-4c4f-b48d-6d2526aba0a0" containerID="95c6ff8b65b51d381888a7582790b4eadbb627c64b6601422277e3026f21f38e" exitCode=0 Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.126244 4840 generic.go:334] "Generic (PLEG): container finished" podID="b9ce8593-a546-4c4f-b48d-6d2526aba0a0" containerID="c0cec1b9e7d7167b66e4b4b564af10e078a1f469b9f6cd7703c40983368f29c1" exitCode=2 Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.126252 4840 generic.go:334] "Generic (PLEG): container finished" podID="b9ce8593-a546-4c4f-b48d-6d2526aba0a0" containerID="f3dd9e49685d2cd8747b189efb5c92a2ba00b8da01f4f89472db135bb79fbb74" exitCode=0 Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.126299 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b9ce8593-a546-4c4f-b48d-6d2526aba0a0","Type":"ContainerDied","Data":"95c6ff8b65b51d381888a7582790b4eadbb627c64b6601422277e3026f21f38e"} Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.126328 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b9ce8593-a546-4c4f-b48d-6d2526aba0a0","Type":"ContainerDied","Data":"c0cec1b9e7d7167b66e4b4b564af10e078a1f469b9f6cd7703c40983368f29c1"} Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.126339 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b9ce8593-a546-4c4f-b48d-6d2526aba0a0","Type":"ContainerDied","Data":"f3dd9e49685d2cd8747b189efb5c92a2ba00b8da01f4f89472db135bb79fbb74"} Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.130679 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-gtmvq" event={"ID":"f53be4d9-c650-4344-8a6f-79258ea44a6c","Type":"ContainerDied","Data":"7b4f0d0c0c1e46f4196815c07d8a0d09bc799e18e016c36e09d921a37880cde8"} Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.130726 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7b4f0d0c0c1e46f4196815c07d8a0d09bc799e18e016c36e09d921a37880cde8" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.130794 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-gtmvq" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.448807 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-67c65cc6bd-s49k5" podUID="15d9aa0d-7948-40b8-a043-e6d36f2a5bb1" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.149:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.149:8443: connect: connection refused" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.448954 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.788032 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.795803 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-1cfe-account-create-update-g6pb6" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.811615 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-47ef-account-create-update-9v4gn" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.871738 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b6a2385-87c2-4077-a39a-440e4e3d3e51-operator-scripts\") pod \"8b6a2385-87c2-4077-a39a-440e4e3d3e51\" (UID: \"8b6a2385-87c2-4077-a39a-440e4e3d3e51\") " Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.871806 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/231dd866-8fc7-4d81-a391-8cfc74561bd6-scripts\") pod \"231dd866-8fc7-4d81-a391-8cfc74561bd6\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.871847 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/231dd866-8fc7-4d81-a391-8cfc74561bd6-logs\") pod \"231dd866-8fc7-4d81-a391-8cfc74561bd6\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.871935 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/231dd866-8fc7-4d81-a391-8cfc74561bd6-internal-tls-certs\") pod \"231dd866-8fc7-4d81-a391-8cfc74561bd6\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.871954 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/231dd866-8fc7-4d81-a391-8cfc74561bd6-combined-ca-bundle\") pod \"231dd866-8fc7-4d81-a391-8cfc74561bd6\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.871968 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"231dd866-8fc7-4d81-a391-8cfc74561bd6\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.871981 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/231dd866-8fc7-4d81-a391-8cfc74561bd6-config-data\") pod \"231dd866-8fc7-4d81-a391-8cfc74561bd6\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.872026 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pspjc\" (UniqueName: \"kubernetes.io/projected/07957586-8bfc-4935-a092-2245ac49771a-kube-api-access-pspjc\") pod \"07957586-8bfc-4935-a092-2245ac49771a\" (UID: \"07957586-8bfc-4935-a092-2245ac49771a\") " Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.872109 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8rjf2\" (UniqueName: \"kubernetes.io/projected/231dd866-8fc7-4d81-a391-8cfc74561bd6-kube-api-access-8rjf2\") pod \"231dd866-8fc7-4d81-a391-8cfc74561bd6\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.872132 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/07957586-8bfc-4935-a092-2245ac49771a-operator-scripts\") pod \"07957586-8bfc-4935-a092-2245ac49771a\" (UID: 
\"07957586-8bfc-4935-a092-2245ac49771a\") " Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.872149 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xqql\" (UniqueName: \"kubernetes.io/projected/8b6a2385-87c2-4077-a39a-440e4e3d3e51-kube-api-access-2xqql\") pod \"8b6a2385-87c2-4077-a39a-440e4e3d3e51\" (UID: \"8b6a2385-87c2-4077-a39a-440e4e3d3e51\") " Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.872209 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/231dd866-8fc7-4d81-a391-8cfc74561bd6-httpd-run\") pod \"231dd866-8fc7-4d81-a391-8cfc74561bd6\" (UID: \"231dd866-8fc7-4d81-a391-8cfc74561bd6\") " Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.873034 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/231dd866-8fc7-4d81-a391-8cfc74561bd6-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "231dd866-8fc7-4d81-a391-8cfc74561bd6" (UID: "231dd866-8fc7-4d81-a391-8cfc74561bd6"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.874283 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b6a2385-87c2-4077-a39a-440e4e3d3e51-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8b6a2385-87c2-4077-a39a-440e4e3d3e51" (UID: "8b6a2385-87c2-4077-a39a-440e4e3d3e51"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.878722 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07957586-8bfc-4935-a092-2245ac49771a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "07957586-8bfc-4935-a092-2245ac49771a" (UID: "07957586-8bfc-4935-a092-2245ac49771a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.880957 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "231dd866-8fc7-4d81-a391-8cfc74561bd6" (UID: "231dd866-8fc7-4d81-a391-8cfc74561bd6"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.882837 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/231dd866-8fc7-4d81-a391-8cfc74561bd6-logs" (OuterVolumeSpecName: "logs") pod "231dd866-8fc7-4d81-a391-8cfc74561bd6" (UID: "231dd866-8fc7-4d81-a391-8cfc74561bd6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.882950 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b6a2385-87c2-4077-a39a-440e4e3d3e51-kube-api-access-2xqql" (OuterVolumeSpecName: "kube-api-access-2xqql") pod "8b6a2385-87c2-4077-a39a-440e4e3d3e51" (UID: "8b6a2385-87c2-4077-a39a-440e4e3d3e51"). InnerVolumeSpecName "kube-api-access-2xqql". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.888079 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07957586-8bfc-4935-a092-2245ac49771a-kube-api-access-pspjc" (OuterVolumeSpecName: "kube-api-access-pspjc") pod "07957586-8bfc-4935-a092-2245ac49771a" (UID: "07957586-8bfc-4935-a092-2245ac49771a"). InnerVolumeSpecName "kube-api-access-pspjc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.895198 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/231dd866-8fc7-4d81-a391-8cfc74561bd6-kube-api-access-8rjf2" (OuterVolumeSpecName: "kube-api-access-8rjf2") pod "231dd866-8fc7-4d81-a391-8cfc74561bd6" (UID: "231dd866-8fc7-4d81-a391-8cfc74561bd6"). InnerVolumeSpecName "kube-api-access-8rjf2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.906312 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/231dd866-8fc7-4d81-a391-8cfc74561bd6-scripts" (OuterVolumeSpecName: "scripts") pod "231dd866-8fc7-4d81-a391-8cfc74561bd6" (UID: "231dd866-8fc7-4d81-a391-8cfc74561bd6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.923935 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/231dd866-8fc7-4d81-a391-8cfc74561bd6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "231dd866-8fc7-4d81-a391-8cfc74561bd6" (UID: "231dd866-8fc7-4d81-a391-8cfc74561bd6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.956644 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/231dd866-8fc7-4d81-a391-8cfc74561bd6-config-data" (OuterVolumeSpecName: "config-data") pod "231dd866-8fc7-4d81-a391-8cfc74561bd6" (UID: "231dd866-8fc7-4d81-a391-8cfc74561bd6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.959589 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/231dd866-8fc7-4d81-a391-8cfc74561bd6-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "231dd866-8fc7-4d81-a391-8cfc74561bd6" (UID: "231dd866-8fc7-4d81-a391-8cfc74561bd6"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.973941 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pspjc\" (UniqueName: \"kubernetes.io/projected/07957586-8bfc-4935-a092-2245ac49771a-kube-api-access-pspjc\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.973971 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8rjf2\" (UniqueName: \"kubernetes.io/projected/231dd866-8fc7-4d81-a391-8cfc74561bd6-kube-api-access-8rjf2\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.973984 4840 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/07957586-8bfc-4935-a092-2245ac49771a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.973999 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xqql\" (UniqueName: \"kubernetes.io/projected/8b6a2385-87c2-4077-a39a-440e4e3d3e51-kube-api-access-2xqql\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.974011 4840 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/231dd866-8fc7-4d81-a391-8cfc74561bd6-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.974023 4840 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b6a2385-87c2-4077-a39a-440e4e3d3e51-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.974034 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/231dd866-8fc7-4d81-a391-8cfc74561bd6-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.974044 4840 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/231dd866-8fc7-4d81-a391-8cfc74561bd6-logs\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.974053 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/231dd866-8fc7-4d81-a391-8cfc74561bd6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.974063 4840 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/231dd866-8fc7-4d81-a391-8cfc74561bd6-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.974103 4840 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Dec 05 15:19:22 crc kubenswrapper[4840]: I1205 15:19:22.974114 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/231dd866-8fc7-4d81-a391-8cfc74561bd6-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.005468 4840 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.077232 
4840 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.141020 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"231dd866-8fc7-4d81-a391-8cfc74561bd6","Type":"ContainerDied","Data":"c5c0520f4be89bba2d8927849e9c3c69fce59a386350bb5a92ce37907aa8dcd9"} Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.141045 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.141076 4840 scope.go:117] "RemoveContainer" containerID="f7c846a940e475e325e21699b3a1056c61b5233111e0b0b7c113450ca39dae31" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.142890 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-47ef-account-create-update-9v4gn" event={"ID":"8b6a2385-87c2-4077-a39a-440e4e3d3e51","Type":"ContainerDied","Data":"f6aad75ce68d84323914d780b00dea4628a6a8389db03629ccb63422bdebce3a"} Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.142947 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f6aad75ce68d84323914d780b00dea4628a6a8389db03629ccb63422bdebce3a" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.142916 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-47ef-account-create-update-9v4gn" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.144328 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-1cfe-account-create-update-g6pb6" event={"ID":"07957586-8bfc-4935-a092-2245ac49771a","Type":"ContainerDied","Data":"e5574c4f63e9f628476cdd821af5e89332da72ac5f7288363fe504ec2771bd69"} Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.144368 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e5574c4f63e9f628476cdd821af5e89332da72ac5f7288363fe504ec2771bd69" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.144411 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-1cfe-account-create-update-g6pb6" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.164010 4840 scope.go:117] "RemoveContainer" containerID="7caadfdbb2233ff8d9af4ed20fed744814ac639a14bb366f509bc793300360b8" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.214004 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.231208 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.240535 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 15:19:23 crc kubenswrapper[4840]: E1205 15:19:23.241052 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1e31d97-cf03-4d2a-abd2-14cf20e60d90" containerName="mariadb-account-create-update" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.241074 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1e31d97-cf03-4d2a-abd2-14cf20e60d90" containerName="mariadb-account-create-update" Dec 05 15:19:23 crc kubenswrapper[4840]: E1205 15:19:23.241086 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="231dd866-8fc7-4d81-a391-8cfc74561bd6" containerName="glance-log" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.241092 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="231dd866-8fc7-4d81-a391-8cfc74561bd6" containerName="glance-log" Dec 05 15:19:23 crc kubenswrapper[4840]: E1205 15:19:23.241106 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72d018e0-5ef5-45a1-914e-e733965b1089" containerName="mariadb-database-create" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.241113 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="72d018e0-5ef5-45a1-914e-e733965b1089" containerName="mariadb-database-create" Dec 05 15:19:23 crc kubenswrapper[4840]: E1205 15:19:23.241123 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48ad6b4f-c38d-40f9-9348-1af570cb7f35" containerName="mariadb-database-create" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.241128 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="48ad6b4f-c38d-40f9-9348-1af570cb7f35" containerName="mariadb-database-create" Dec 05 15:19:23 crc kubenswrapper[4840]: E1205 15:19:23.241141 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f53be4d9-c650-4344-8a6f-79258ea44a6c" containerName="mariadb-database-create" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.241146 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="f53be4d9-c650-4344-8a6f-79258ea44a6c" containerName="mariadb-database-create" Dec 05 15:19:23 crc kubenswrapper[4840]: E1205 15:19:23.241159 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b6a2385-87c2-4077-a39a-440e4e3d3e51" containerName="mariadb-account-create-update" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.241164 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b6a2385-87c2-4077-a39a-440e4e3d3e51" containerName="mariadb-account-create-update" Dec 05 15:19:23 crc kubenswrapper[4840]: E1205 15:19:23.241179 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07957586-8bfc-4935-a092-2245ac49771a" containerName="mariadb-account-create-update" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.241185 4840 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="07957586-8bfc-4935-a092-2245ac49771a" containerName="mariadb-account-create-update" Dec 05 15:19:23 crc kubenswrapper[4840]: E1205 15:19:23.241194 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="231dd866-8fc7-4d81-a391-8cfc74561bd6" containerName="glance-httpd" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.241200 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="231dd866-8fc7-4d81-a391-8cfc74561bd6" containerName="glance-httpd" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.241386 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="07957586-8bfc-4935-a092-2245ac49771a" containerName="mariadb-account-create-update" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.241401 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b6a2385-87c2-4077-a39a-440e4e3d3e51" containerName="mariadb-account-create-update" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.241412 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="231dd866-8fc7-4d81-a391-8cfc74561bd6" containerName="glance-log" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.241423 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="48ad6b4f-c38d-40f9-9348-1af570cb7f35" containerName="mariadb-database-create" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.241435 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="f53be4d9-c650-4344-8a6f-79258ea44a6c" containerName="mariadb-database-create" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.241446 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="72d018e0-5ef5-45a1-914e-e733965b1089" containerName="mariadb-database-create" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.241458 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1e31d97-cf03-4d2a-abd2-14cf20e60d90" containerName="mariadb-account-create-update" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.241469 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="231dd866-8fc7-4d81-a391-8cfc74561bd6" containerName="glance-httpd" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.242474 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.258425 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.258807 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.263172 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.286087 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6326423c-2a7f-4f3c-b361-de370bd51817-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.286154 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6326423c-2a7f-4f3c-b361-de370bd51817-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.286282 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6326423c-2a7f-4f3c-b361-de370bd51817-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.286317 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6326423c-2a7f-4f3c-b361-de370bd51817-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.286386 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.286455 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6326423c-2a7f-4f3c-b361-de370bd51817-logs\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.286522 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6326423c-2a7f-4f3c-b361-de370bd51817-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.286630 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-44g9n\" (UniqueName: \"kubernetes.io/projected/6326423c-2a7f-4f3c-b361-de370bd51817-kube-api-access-44g9n\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.388815 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6326423c-2a7f-4f3c-b361-de370bd51817-logs\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.390206 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6326423c-2a7f-4f3c-b361-de370bd51817-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.390470 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44g9n\" (UniqueName: \"kubernetes.io/projected/6326423c-2a7f-4f3c-b361-de370bd51817-kube-api-access-44g9n\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.390666 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6326423c-2a7f-4f3c-b361-de370bd51817-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.390822 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6326423c-2a7f-4f3c-b361-de370bd51817-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.390977 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6326423c-2a7f-4f3c-b361-de370bd51817-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.391072 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6326423c-2a7f-4f3c-b361-de370bd51817-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.391205 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.389726 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/6326423c-2a7f-4f3c-b361-de370bd51817-logs\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.391964 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6326423c-2a7f-4f3c-b361-de370bd51817-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.393012 4840 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.396320 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6326423c-2a7f-4f3c-b361-de370bd51817-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.396373 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6326423c-2a7f-4f3c-b361-de370bd51817-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.398572 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6326423c-2a7f-4f3c-b361-de370bd51817-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.402423 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6326423c-2a7f-4f3c-b361-de370bd51817-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.411961 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44g9n\" (UniqueName: \"kubernetes.io/projected/6326423c-2a7f-4f3c-b361-de370bd51817-kube-api-access-44g9n\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.427457 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"6326423c-2a7f-4f3c-b361-de370bd51817\") " pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.587041 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 15:19:23 crc kubenswrapper[4840]: I1205 15:19:23.954116 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 15:19:24 crc kubenswrapper[4840]: I1205 15:19:24.086282 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="231dd866-8fc7-4d81-a391-8cfc74561bd6" path="/var/lib/kubelet/pods/231dd866-8fc7-4d81-a391-8cfc74561bd6/volumes" Dec 05 15:19:24 crc kubenswrapper[4840]: I1205 15:19:24.182041 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6326423c-2a7f-4f3c-b361-de370bd51817","Type":"ContainerStarted","Data":"0558de29e77a6e13526ad126a6c90008dcaa9e2ea571dd4933cef439ef82733a"} Dec 05 15:19:24 crc kubenswrapper[4840]: I1205 15:19:24.414899 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 15:19:24 crc kubenswrapper[4840]: I1205 15:19:24.415151 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="3c46fc46-e681-4b04-8367-3a9f6e0283a6" containerName="glance-log" containerID="cri-o://7ce3d9c4d79a99efd6c6f4dd67e5809882b393c8c67152d5af4ff938c2b706bd" gracePeriod=30 Dec 05 15:19:24 crc kubenswrapper[4840]: I1205 15:19:24.415253 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="3c46fc46-e681-4b04-8367-3a9f6e0283a6" containerName="glance-httpd" containerID="cri-o://64af2d24858b2beb279e44aa97a9422ef24169572f22c557b1db4008a94b96ed" gracePeriod=30 Dec 05 15:19:25 crc kubenswrapper[4840]: I1205 15:19:25.310304 4840 generic.go:334] "Generic (PLEG): container finished" podID="3c46fc46-e681-4b04-8367-3a9f6e0283a6" containerID="7ce3d9c4d79a99efd6c6f4dd67e5809882b393c8c67152d5af4ff938c2b706bd" exitCode=143 Dec 05 15:19:25 crc kubenswrapper[4840]: I1205 15:19:25.310389 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3c46fc46-e681-4b04-8367-3a9f6e0283a6","Type":"ContainerDied","Data":"7ce3d9c4d79a99efd6c6f4dd67e5809882b393c8c67152d5af4ff938c2b706bd"} Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.100739 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.314044 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.375853 4840 generic.go:334] "Generic (PLEG): container finished" podID="15d9aa0d-7948-40b8-a043-e6d36f2a5bb1" containerID="0f4137ff5a4365825eaf588ce2e0ecffcd8c5c44aa14dc9661428fbcf297af65" exitCode=137 Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.376052 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-67c65cc6bd-s49k5" event={"ID":"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1","Type":"ContainerDied","Data":"0f4137ff5a4365825eaf588ce2e0ecffcd8c5c44aa14dc9661428fbcf297af65"} Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.376079 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-67c65cc6bd-s49k5" event={"ID":"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1","Type":"ContainerDied","Data":"11b305fc9905553ba62ea67f93ee1484bc661879ec03c79e6a74b22159ffd04d"} Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.376095 4840 scope.go:117] "RemoveContainer" containerID="d4456ed432fab1713012466f282b0a590b939e801f0b909aaed8aca97366bc96" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.376214 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-67c65cc6bd-s49k5" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.387954 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jhpgl"] Dec 05 15:19:28 crc kubenswrapper[4840]: E1205 15:19:28.388456 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9ce8593-a546-4c4f-b48d-6d2526aba0a0" containerName="proxy-httpd" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.388472 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ce8593-a546-4c4f-b48d-6d2526aba0a0" containerName="proxy-httpd" Dec 05 15:19:28 crc kubenswrapper[4840]: E1205 15:19:28.388505 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9ce8593-a546-4c4f-b48d-6d2526aba0a0" containerName="sg-core" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.388514 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ce8593-a546-4c4f-b48d-6d2526aba0a0" containerName="sg-core" Dec 05 15:19:28 crc kubenswrapper[4840]: E1205 15:19:28.388532 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15d9aa0d-7948-40b8-a043-e6d36f2a5bb1" containerName="horizon" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.388542 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="15d9aa0d-7948-40b8-a043-e6d36f2a5bb1" containerName="horizon" Dec 05 15:19:28 crc kubenswrapper[4840]: E1205 15:19:28.388558 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15d9aa0d-7948-40b8-a043-e6d36f2a5bb1" containerName="horizon-log" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.388566 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="15d9aa0d-7948-40b8-a043-e6d36f2a5bb1" containerName="horizon-log" Dec 05 15:19:28 crc kubenswrapper[4840]: E1205 15:19:28.388578 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9ce8593-a546-4c4f-b48d-6d2526aba0a0" containerName="ceilometer-central-agent" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.388586 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ce8593-a546-4c4f-b48d-6d2526aba0a0" containerName="ceilometer-central-agent" Dec 05 15:19:28 crc kubenswrapper[4840]: E1205 15:19:28.388604 4840 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="b9ce8593-a546-4c4f-b48d-6d2526aba0a0" containerName="ceilometer-notification-agent" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.388611 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9ce8593-a546-4c4f-b48d-6d2526aba0a0" containerName="ceilometer-notification-agent" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.388847 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ce8593-a546-4c4f-b48d-6d2526aba0a0" containerName="ceilometer-central-agent" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.388889 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ce8593-a546-4c4f-b48d-6d2526aba0a0" containerName="proxy-httpd" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.388901 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="15d9aa0d-7948-40b8-a043-e6d36f2a5bb1" containerName="horizon-log" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.388916 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ce8593-a546-4c4f-b48d-6d2526aba0a0" containerName="sg-core" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.388951 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9ce8593-a546-4c4f-b48d-6d2526aba0a0" containerName="ceilometer-notification-agent" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.388963 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="15d9aa0d-7948-40b8-a043-e6d36f2a5bb1" containerName="horizon" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.389685 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-jhpgl" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.413462 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jhpgl"] Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.418780 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-combined-ca-bundle\") pod \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.418887 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-scripts\") pod \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.418976 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-config-data\") pod \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.419047 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-horizon-tls-certs\") pod \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.419061 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-horizon-secret-key\") pod 
\"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.419131 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-logs\") pod \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.419156 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmrwd\" (UniqueName: \"kubernetes.io/projected/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-kube-api-access-nmrwd\") pod \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\" (UID: \"15d9aa0d-7948-40b8-a043-e6d36f2a5bb1\") " Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.421782 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6326423c-2a7f-4f3c-b361-de370bd51817","Type":"ContainerStarted","Data":"014b4db218d5d64680adf93ef296095dbcab2310436135ba6dffa468fa17df86"} Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.422511 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.422514 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.422747 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-xf7fq" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.426965 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "15d9aa0d-7948-40b8-a043-e6d36f2a5bb1" (UID: "15d9aa0d-7948-40b8-a043-e6d36f2a5bb1"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.439137 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-kube-api-access-nmrwd" (OuterVolumeSpecName: "kube-api-access-nmrwd") pod "15d9aa0d-7948-40b8-a043-e6d36f2a5bb1" (UID: "15d9aa0d-7948-40b8-a043-e6d36f2a5bb1"). InnerVolumeSpecName "kube-api-access-nmrwd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.439143 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-logs" (OuterVolumeSpecName: "logs") pod "15d9aa0d-7948-40b8-a043-e6d36f2a5bb1" (UID: "15d9aa0d-7948-40b8-a043-e6d36f2a5bb1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.478451 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "15d9aa0d-7948-40b8-a043-e6d36f2a5bb1" (UID: "15d9aa0d-7948-40b8-a043-e6d36f2a5bb1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.481514 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-config-data" (OuterVolumeSpecName: "config-data") pod "15d9aa0d-7948-40b8-a043-e6d36f2a5bb1" (UID: "15d9aa0d-7948-40b8-a043-e6d36f2a5bb1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.483136 4840 generic.go:334] "Generic (PLEG): container finished" podID="3c46fc46-e681-4b04-8367-3a9f6e0283a6" containerID="64af2d24858b2beb279e44aa97a9422ef24169572f22c557b1db4008a94b96ed" exitCode=0 Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.483224 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3c46fc46-e681-4b04-8367-3a9f6e0283a6","Type":"ContainerDied","Data":"64af2d24858b2beb279e44aa97a9422ef24169572f22c557b1db4008a94b96ed"} Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.500422 4840 generic.go:334] "Generic (PLEG): container finished" podID="b9ce8593-a546-4c4f-b48d-6d2526aba0a0" containerID="4011a702e843de0022396e7e6fb4bef52c65274e056de06d8bdf9c26d2264f50" exitCode=0 Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.500470 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b9ce8593-a546-4c4f-b48d-6d2526aba0a0","Type":"ContainerDied","Data":"4011a702e843de0022396e7e6fb4bef52c65274e056de06d8bdf9c26d2264f50"} Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.500498 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b9ce8593-a546-4c4f-b48d-6d2526aba0a0","Type":"ContainerDied","Data":"a769a38fe75d5c6125a91e6bc7646101dc3944120140a374a123b5baf188b0d1"} Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.500573 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.521173 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-log-httpd\") pod \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.521268 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcqf6\" (UniqueName: \"kubernetes.io/projected/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-kube-api-access-jcqf6\") pod \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.521293 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-combined-ca-bundle\") pod \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.521448 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-config-data\") pod \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.521491 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-sg-core-conf-yaml\") pod \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.521700 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-run-httpd\") pod \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.521738 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-scripts\") pod \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\" (UID: \"b9ce8593-a546-4c4f-b48d-6d2526aba0a0\") " Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.522094 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-scripts" (OuterVolumeSpecName: "scripts") pod "15d9aa0d-7948-40b8-a043-e6d36f2a5bb1" (UID: "15d9aa0d-7948-40b8-a043-e6d36f2a5bb1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.522084 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b9ce8593-a546-4c4f-b48d-6d2526aba0a0" (UID: "b9ce8593-a546-4c4f-b48d-6d2526aba0a0"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.523255 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd12815f-fbf2-4030-847f-d843ac9f5343-scripts\") pod \"nova-cell0-conductor-db-sync-jhpgl\" (UID: \"fd12815f-fbf2-4030-847f-d843ac9f5343\") " pod="openstack/nova-cell0-conductor-db-sync-jhpgl" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.523409 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4rp8\" (UniqueName: \"kubernetes.io/projected/fd12815f-fbf2-4030-847f-d843ac9f5343-kube-api-access-n4rp8\") pod \"nova-cell0-conductor-db-sync-jhpgl\" (UID: \"fd12815f-fbf2-4030-847f-d843ac9f5343\") " pod="openstack/nova-cell0-conductor-db-sync-jhpgl" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.523453 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd12815f-fbf2-4030-847f-d843ac9f5343-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-jhpgl\" (UID: \"fd12815f-fbf2-4030-847f-d843ac9f5343\") " pod="openstack/nova-cell0-conductor-db-sync-jhpgl" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.523497 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd12815f-fbf2-4030-847f-d843ac9f5343-config-data\") pod \"nova-cell0-conductor-db-sync-jhpgl\" (UID: \"fd12815f-fbf2-4030-847f-d843ac9f5343\") " pod="openstack/nova-cell0-conductor-db-sync-jhpgl" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.523578 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.523592 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.523602 4840 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.523610 4840 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.523618 4840 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-logs\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.523626 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmrwd\" (UniqueName: \"kubernetes.io/projected/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-kube-api-access-nmrwd\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.523634 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-combined-ca-bundle\") on node 
\"crc\" DevicePath \"\"" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.529808 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-scripts" (OuterVolumeSpecName: "scripts") pod "b9ce8593-a546-4c4f-b48d-6d2526aba0a0" (UID: "b9ce8593-a546-4c4f-b48d-6d2526aba0a0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.534742 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b9ce8593-a546-4c4f-b48d-6d2526aba0a0" (UID: "b9ce8593-a546-4c4f-b48d-6d2526aba0a0"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.572554 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-kube-api-access-jcqf6" (OuterVolumeSpecName: "kube-api-access-jcqf6") pod "b9ce8593-a546-4c4f-b48d-6d2526aba0a0" (UID: "b9ce8593-a546-4c4f-b48d-6d2526aba0a0"). InnerVolumeSpecName "kube-api-access-jcqf6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.604056 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "15d9aa0d-7948-40b8-a043-e6d36f2a5bb1" (UID: "15d9aa0d-7948-40b8-a043-e6d36f2a5bb1"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.627669 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4rp8\" (UniqueName: \"kubernetes.io/projected/fd12815f-fbf2-4030-847f-d843ac9f5343-kube-api-access-n4rp8\") pod \"nova-cell0-conductor-db-sync-jhpgl\" (UID: \"fd12815f-fbf2-4030-847f-d843ac9f5343\") " pod="openstack/nova-cell0-conductor-db-sync-jhpgl" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.630052 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd12815f-fbf2-4030-847f-d843ac9f5343-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-jhpgl\" (UID: \"fd12815f-fbf2-4030-847f-d843ac9f5343\") " pod="openstack/nova-cell0-conductor-db-sync-jhpgl" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.630203 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd12815f-fbf2-4030-847f-d843ac9f5343-config-data\") pod \"nova-cell0-conductor-db-sync-jhpgl\" (UID: \"fd12815f-fbf2-4030-847f-d843ac9f5343\") " pod="openstack/nova-cell0-conductor-db-sync-jhpgl" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.630319 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd12815f-fbf2-4030-847f-d843ac9f5343-scripts\") pod \"nova-cell0-conductor-db-sync-jhpgl\" (UID: \"fd12815f-fbf2-4030-847f-d843ac9f5343\") " pod="openstack/nova-cell0-conductor-db-sync-jhpgl" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.630462 4840 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.630478 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.630493 4840 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.630506 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcqf6\" (UniqueName: \"kubernetes.io/projected/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-kube-api-access-jcqf6\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.646386 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd12815f-fbf2-4030-847f-d843ac9f5343-config-data\") pod \"nova-cell0-conductor-db-sync-jhpgl\" (UID: \"fd12815f-fbf2-4030-847f-d843ac9f5343\") " pod="openstack/nova-cell0-conductor-db-sync-jhpgl" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.655146 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd12815f-fbf2-4030-847f-d843ac9f5343-scripts\") pod \"nova-cell0-conductor-db-sync-jhpgl\" (UID: \"fd12815f-fbf2-4030-847f-d843ac9f5343\") " pod="openstack/nova-cell0-conductor-db-sync-jhpgl" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.666666 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd12815f-fbf2-4030-847f-d843ac9f5343-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-jhpgl\" (UID: \"fd12815f-fbf2-4030-847f-d843ac9f5343\") " pod="openstack/nova-cell0-conductor-db-sync-jhpgl" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.667329 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4rp8\" (UniqueName: \"kubernetes.io/projected/fd12815f-fbf2-4030-847f-d843ac9f5343-kube-api-access-n4rp8\") pod \"nova-cell0-conductor-db-sync-jhpgl\" (UID: \"fd12815f-fbf2-4030-847f-d843ac9f5343\") " pod="openstack/nova-cell0-conductor-db-sync-jhpgl" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.675179 4840 scope.go:117] "RemoveContainer" containerID="0f4137ff5a4365825eaf588ce2e0ecffcd8c5c44aa14dc9661428fbcf297af65" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.719963 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b9ce8593-a546-4c4f-b48d-6d2526aba0a0" (UID: "b9ce8593-a546-4c4f-b48d-6d2526aba0a0"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.740015 4840 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.812028 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-jhpgl" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.857585 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-config-data" (OuterVolumeSpecName: "config-data") pod "b9ce8593-a546-4c4f-b48d-6d2526aba0a0" (UID: "b9ce8593-a546-4c4f-b48d-6d2526aba0a0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.878735 4840 scope.go:117] "RemoveContainer" containerID="d4456ed432fab1713012466f282b0a590b939e801f0b909aaed8aca97366bc96" Dec 05 15:19:28 crc kubenswrapper[4840]: E1205 15:19:28.891527 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4456ed432fab1713012466f282b0a590b939e801f0b909aaed8aca97366bc96\": container with ID starting with d4456ed432fab1713012466f282b0a590b939e801f0b909aaed8aca97366bc96 not found: ID does not exist" containerID="d4456ed432fab1713012466f282b0a590b939e801f0b909aaed8aca97366bc96" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.891580 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4456ed432fab1713012466f282b0a590b939e801f0b909aaed8aca97366bc96"} err="failed to get container status \"d4456ed432fab1713012466f282b0a590b939e801f0b909aaed8aca97366bc96\": rpc error: code = NotFound desc = could not find container \"d4456ed432fab1713012466f282b0a590b939e801f0b909aaed8aca97366bc96\": container with ID starting with d4456ed432fab1713012466f282b0a590b939e801f0b909aaed8aca97366bc96 not found: ID does not exist" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.891615 4840 scope.go:117] "RemoveContainer" containerID="0f4137ff5a4365825eaf588ce2e0ecffcd8c5c44aa14dc9661428fbcf297af65" Dec 05 15:19:28 crc kubenswrapper[4840]: E1205 15:19:28.896771 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f4137ff5a4365825eaf588ce2e0ecffcd8c5c44aa14dc9661428fbcf297af65\": container with ID starting with 0f4137ff5a4365825eaf588ce2e0ecffcd8c5c44aa14dc9661428fbcf297af65 not found: ID does not exist" containerID="0f4137ff5a4365825eaf588ce2e0ecffcd8c5c44aa14dc9661428fbcf297af65" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.897182 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f4137ff5a4365825eaf588ce2e0ecffcd8c5c44aa14dc9661428fbcf297af65"} err="failed to get container status \"0f4137ff5a4365825eaf588ce2e0ecffcd8c5c44aa14dc9661428fbcf297af65\": rpc error: code = NotFound desc = could not find container \"0f4137ff5a4365825eaf588ce2e0ecffcd8c5c44aa14dc9661428fbcf297af65\": container with ID starting with 0f4137ff5a4365825eaf588ce2e0ecffcd8c5c44aa14dc9661428fbcf297af65 not found: ID does not exist" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.897214 4840 scope.go:117] "RemoveContainer" containerID="95c6ff8b65b51d381888a7582790b4eadbb627c64b6601422277e3026f21f38e" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.897430 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-67c65cc6bd-s49k5"] Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.899070 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.903931 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b9ce8593-a546-4c4f-b48d-6d2526aba0a0" (UID: "b9ce8593-a546-4c4f-b48d-6d2526aba0a0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.908542 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-67c65cc6bd-s49k5"] Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.920987 4840 scope.go:117] "RemoveContainer" containerID="c0cec1b9e7d7167b66e4b4b564af10e078a1f469b9f6cd7703c40983368f29c1" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.944275 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ff6zr\" (UniqueName: \"kubernetes.io/projected/3c46fc46-e681-4b04-8367-3a9f6e0283a6-kube-api-access-ff6zr\") pod \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.944402 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c46fc46-e681-4b04-8367-3a9f6e0283a6-config-data\") pod \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.944462 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3c46fc46-e681-4b04-8367-3a9f6e0283a6-httpd-run\") pod \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.944548 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c46fc46-e681-4b04-8367-3a9f6e0283a6-public-tls-certs\") pod \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.944613 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c46fc46-e681-4b04-8367-3a9f6e0283a6-scripts\") pod \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.944681 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c46fc46-e681-4b04-8367-3a9f6e0283a6-combined-ca-bundle\") pod \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.944734 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c46fc46-e681-4b04-8367-3a9f6e0283a6-logs\") pod \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.946251 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c46fc46-e681-4b04-8367-3a9f6e0283a6-httpd-run" (OuterVolumeSpecName: "httpd-run") pod 
"3c46fc46-e681-4b04-8367-3a9f6e0283a6" (UID: "3c46fc46-e681-4b04-8367-3a9f6e0283a6"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.950285 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c46fc46-e681-4b04-8367-3a9f6e0283a6-kube-api-access-ff6zr" (OuterVolumeSpecName: "kube-api-access-ff6zr") pod "3c46fc46-e681-4b04-8367-3a9f6e0283a6" (UID: "3c46fc46-e681-4b04-8367-3a9f6e0283a6"). InnerVolumeSpecName "kube-api-access-ff6zr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.958214 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c46fc46-e681-4b04-8367-3a9f6e0283a6-logs" (OuterVolumeSpecName: "logs") pod "3c46fc46-e681-4b04-8367-3a9f6e0283a6" (UID: "3c46fc46-e681-4b04-8367-3a9f6e0283a6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.966522 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\" (UID: \"3c46fc46-e681-4b04-8367-3a9f6e0283a6\") " Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.967345 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.967363 4840 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c46fc46-e681-4b04-8367-3a9f6e0283a6-logs\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.967376 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ff6zr\" (UniqueName: \"kubernetes.io/projected/3c46fc46-e681-4b04-8367-3a9f6e0283a6-kube-api-access-ff6zr\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.967388 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9ce8593-a546-4c4f-b48d-6d2526aba0a0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.967399 4840 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3c46fc46-e681-4b04-8367-3a9f6e0283a6-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.987036 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "3c46fc46-e681-4b04-8367-3a9f6e0283a6" (UID: "3c46fc46-e681-4b04-8367-3a9f6e0283a6"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 15:19:28 crc kubenswrapper[4840]: I1205 15:19:28.991313 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c46fc46-e681-4b04-8367-3a9f6e0283a6-scripts" (OuterVolumeSpecName: "scripts") pod "3c46fc46-e681-4b04-8367-3a9f6e0283a6" (UID: "3c46fc46-e681-4b04-8367-3a9f6e0283a6"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:28.999999 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c46fc46-e681-4b04-8367-3a9f6e0283a6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3c46fc46-e681-4b04-8367-3a9f6e0283a6" (UID: "3c46fc46-e681-4b04-8367-3a9f6e0283a6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.006087 4840 scope.go:117] "RemoveContainer" containerID="f3dd9e49685d2cd8747b189efb5c92a2ba00b8da01f4f89472db135bb79fbb74" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.069253 4840 scope.go:117] "RemoveContainer" containerID="4011a702e843de0022396e7e6fb4bef52c65274e056de06d8bdf9c26d2264f50" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.070605 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c46fc46-e681-4b04-8367-3a9f6e0283a6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.070651 4840 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.070669 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c46fc46-e681-4b04-8367-3a9f6e0283a6-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.083129 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c46fc46-e681-4b04-8367-3a9f6e0283a6-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "3c46fc46-e681-4b04-8367-3a9f6e0283a6" (UID: "3c46fc46-e681-4b04-8367-3a9f6e0283a6"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.091231 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c46fc46-e681-4b04-8367-3a9f6e0283a6-config-data" (OuterVolumeSpecName: "config-data") pod "3c46fc46-e681-4b04-8367-3a9f6e0283a6" (UID: "3c46fc46-e681-4b04-8367-3a9f6e0283a6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.123659 4840 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.172292 4840 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c46fc46-e681-4b04-8367-3a9f6e0283a6-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.172322 4840 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.172333 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c46fc46-e681-4b04-8367-3a9f6e0283a6-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.216579 4840 scope.go:117] "RemoveContainer" containerID="95c6ff8b65b51d381888a7582790b4eadbb627c64b6601422277e3026f21f38e" Dec 05 15:19:29 crc kubenswrapper[4840]: E1205 15:19:29.217983 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95c6ff8b65b51d381888a7582790b4eadbb627c64b6601422277e3026f21f38e\": container with ID starting with 95c6ff8b65b51d381888a7582790b4eadbb627c64b6601422277e3026f21f38e not found: ID does not exist" containerID="95c6ff8b65b51d381888a7582790b4eadbb627c64b6601422277e3026f21f38e" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.218020 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95c6ff8b65b51d381888a7582790b4eadbb627c64b6601422277e3026f21f38e"} err="failed to get container status \"95c6ff8b65b51d381888a7582790b4eadbb627c64b6601422277e3026f21f38e\": rpc error: code = NotFound desc = could not find container \"95c6ff8b65b51d381888a7582790b4eadbb627c64b6601422277e3026f21f38e\": container with ID starting with 95c6ff8b65b51d381888a7582790b4eadbb627c64b6601422277e3026f21f38e not found: ID does not exist" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.218049 4840 scope.go:117] "RemoveContainer" containerID="c0cec1b9e7d7167b66e4b4b564af10e078a1f469b9f6cd7703c40983368f29c1" Dec 05 15:19:29 crc kubenswrapper[4840]: E1205 15:19:29.221417 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0cec1b9e7d7167b66e4b4b564af10e078a1f469b9f6cd7703c40983368f29c1\": container with ID starting with c0cec1b9e7d7167b66e4b4b564af10e078a1f469b9f6cd7703c40983368f29c1 not found: ID does not exist" containerID="c0cec1b9e7d7167b66e4b4b564af10e078a1f469b9f6cd7703c40983368f29c1" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.221464 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0cec1b9e7d7167b66e4b4b564af10e078a1f469b9f6cd7703c40983368f29c1"} err="failed to get container status \"c0cec1b9e7d7167b66e4b4b564af10e078a1f469b9f6cd7703c40983368f29c1\": rpc error: code = NotFound desc = could not find container \"c0cec1b9e7d7167b66e4b4b564af10e078a1f469b9f6cd7703c40983368f29c1\": container with ID starting with c0cec1b9e7d7167b66e4b4b564af10e078a1f469b9f6cd7703c40983368f29c1 not found: ID does not exist" Dec 05 15:19:29 crc 
kubenswrapper[4840]: I1205 15:19:29.221489 4840 scope.go:117] "RemoveContainer" containerID="f3dd9e49685d2cd8747b189efb5c92a2ba00b8da01f4f89472db135bb79fbb74" Dec 05 15:19:29 crc kubenswrapper[4840]: E1205 15:19:29.221951 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3dd9e49685d2cd8747b189efb5c92a2ba00b8da01f4f89472db135bb79fbb74\": container with ID starting with f3dd9e49685d2cd8747b189efb5c92a2ba00b8da01f4f89472db135bb79fbb74 not found: ID does not exist" containerID="f3dd9e49685d2cd8747b189efb5c92a2ba00b8da01f4f89472db135bb79fbb74" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.221979 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3dd9e49685d2cd8747b189efb5c92a2ba00b8da01f4f89472db135bb79fbb74"} err="failed to get container status \"f3dd9e49685d2cd8747b189efb5c92a2ba00b8da01f4f89472db135bb79fbb74\": rpc error: code = NotFound desc = could not find container \"f3dd9e49685d2cd8747b189efb5c92a2ba00b8da01f4f89472db135bb79fbb74\": container with ID starting with f3dd9e49685d2cd8747b189efb5c92a2ba00b8da01f4f89472db135bb79fbb74 not found: ID does not exist" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.221994 4840 scope.go:117] "RemoveContainer" containerID="4011a702e843de0022396e7e6fb4bef52c65274e056de06d8bdf9c26d2264f50" Dec 05 15:19:29 crc kubenswrapper[4840]: E1205 15:19:29.225157 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4011a702e843de0022396e7e6fb4bef52c65274e056de06d8bdf9c26d2264f50\": container with ID starting with 4011a702e843de0022396e7e6fb4bef52c65274e056de06d8bdf9c26d2264f50 not found: ID does not exist" containerID="4011a702e843de0022396e7e6fb4bef52c65274e056de06d8bdf9c26d2264f50" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.225197 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4011a702e843de0022396e7e6fb4bef52c65274e056de06d8bdf9c26d2264f50"} err="failed to get container status \"4011a702e843de0022396e7e6fb4bef52c65274e056de06d8bdf9c26d2264f50\": rpc error: code = NotFound desc = could not find container \"4011a702e843de0022396e7e6fb4bef52c65274e056de06d8bdf9c26d2264f50\": container with ID starting with 4011a702e843de0022396e7e6fb4bef52c65274e056de06d8bdf9c26d2264f50 not found: ID does not exist" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.225930 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.245264 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.289168 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:19:29 crc kubenswrapper[4840]: E1205 15:19:29.383513 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c46fc46-e681-4b04-8367-3a9f6e0283a6" containerName="glance-log" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.383545 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c46fc46-e681-4b04-8367-3a9f6e0283a6" containerName="glance-log" Dec 05 15:19:29 crc kubenswrapper[4840]: E1205 15:19:29.383602 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c46fc46-e681-4b04-8367-3a9f6e0283a6" containerName="glance-httpd" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.383609 4840 
state_mem.go:107] "Deleted CPUSet assignment" podUID="3c46fc46-e681-4b04-8367-3a9f6e0283a6" containerName="glance-httpd" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.384184 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c46fc46-e681-4b04-8367-3a9f6e0283a6" containerName="glance-log" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.384223 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c46fc46-e681-4b04-8367-3a9f6e0283a6" containerName="glance-httpd" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.397257 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.397428 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.404721 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.405138 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.521484 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.539345 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3c46fc46-e681-4b04-8367-3a9f6e0283a6","Type":"ContainerDied","Data":"9783f868f7a9fc5db95e6ed77f4f78beb6e992bdfe4576f05db7cb88995bafa6"} Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.539395 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.539408 4840 scope.go:117] "RemoveContainer" containerID="64af2d24858b2beb279e44aa97a9422ef24169572f22c557b1db4008a94b96ed" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.568842 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6326423c-2a7f-4f3c-b361-de370bd51817","Type":"ContainerStarted","Data":"8187ccc2a6600de3013a88409153572cf513568da6cda67049db1b6bd5e605d5"} Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.608843 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmn5q\" (UniqueName: \"kubernetes.io/projected/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-kube-api-access-nmn5q\") pod \"ceilometer-0\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " pod="openstack/ceilometer-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.609356 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " pod="openstack/ceilometer-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.609390 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-run-httpd\") pod \"ceilometer-0\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " pod="openstack/ceilometer-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.609441 4840 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " pod="openstack/ceilometer-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.609506 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-scripts\") pod \"ceilometer-0\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " pod="openstack/ceilometer-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.609537 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-config-data\") pod \"ceilometer-0\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " pod="openstack/ceilometer-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.609588 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-log-httpd\") pod \"ceilometer-0\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " pod="openstack/ceilometer-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.627605 4840 scope.go:117] "RemoveContainer" containerID="7ce3d9c4d79a99efd6c6f4dd67e5809882b393c8c67152d5af4ff938c2b706bd" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.637056 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.637029014 podStartE2EDuration="6.637029014s" podCreationTimestamp="2025-12-05 15:19:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:19:29.607519673 +0000 UTC m=+1247.948582287" watchObservedRunningTime="2025-12-05 15:19:29.637029014 +0000 UTC m=+1247.978091628" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.657106 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.676269 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.694756 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jhpgl"] Dec 05 15:19:29 crc kubenswrapper[4840]: W1205 15:19:29.707579 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd12815f_fbf2_4030_847f_d843ac9f5343.slice/crio-e7b3ec0ef34e6bede222aab53c9fa2311a84d8d0572aa3e06ecbdff5f66371c3 WatchSource:0}: Error finding container e7b3ec0ef34e6bede222aab53c9fa2311a84d8d0572aa3e06ecbdff5f66371c3: Status 404 returned error can't find the container with id e7b3ec0ef34e6bede222aab53c9fa2311a84d8d0572aa3e06ecbdff5f66371c3 Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.710925 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-config-data\") pod \"ceilometer-0\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " pod="openstack/ceilometer-0" Dec 05 
15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.712163 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-log-httpd\") pod \"ceilometer-0\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " pod="openstack/ceilometer-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.712294 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmn5q\" (UniqueName: \"kubernetes.io/projected/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-kube-api-access-nmn5q\") pod \"ceilometer-0\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " pod="openstack/ceilometer-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.712327 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " pod="openstack/ceilometer-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.712353 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-run-httpd\") pod \"ceilometer-0\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " pod="openstack/ceilometer-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.712389 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " pod="openstack/ceilometer-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.712474 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-scripts\") pod \"ceilometer-0\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " pod="openstack/ceilometer-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.714194 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-log-httpd\") pod \"ceilometer-0\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " pod="openstack/ceilometer-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.714987 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-run-httpd\") pod \"ceilometer-0\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " pod="openstack/ceilometer-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.717742 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-scripts\") pod \"ceilometer-0\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " pod="openstack/ceilometer-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.723640 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.726203 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-sg-core-conf-yaml\") pod 
\"ceilometer-0\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " pod="openstack/ceilometer-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.727987 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " pod="openstack/ceilometer-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.728143 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-config-data\") pod \"ceilometer-0\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " pod="openstack/ceilometer-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.740509 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.745714 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.747624 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.751190 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.756737 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmn5q\" (UniqueName: \"kubernetes.io/projected/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-kube-api-access-nmn5q\") pod \"ceilometer-0\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " pod="openstack/ceilometer-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.988680 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b8843aac-3856-4c2d-80d4-d3f642065c75-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b8843aac-3856-4c2d-80d4-d3f642065c75\") " pod="openstack/glance-default-external-api-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.989095 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8843aac-3856-4c2d-80d4-d3f642065c75-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b8843aac-3856-4c2d-80d4-d3f642065c75\") " pod="openstack/glance-default-external-api-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.989162 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8843aac-3856-4c2d-80d4-d3f642065c75-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b8843aac-3856-4c2d-80d4-d3f642065c75\") " pod="openstack/glance-default-external-api-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.989207 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8843aac-3856-4c2d-80d4-d3f642065c75-logs\") pod \"glance-default-external-api-0\" (UID: \"b8843aac-3856-4c2d-80d4-d3f642065c75\") " pod="openstack/glance-default-external-api-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.989246 4840 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwxpt\" (UniqueName: \"kubernetes.io/projected/b8843aac-3856-4c2d-80d4-d3f642065c75-kube-api-access-fwxpt\") pod \"glance-default-external-api-0\" (UID: \"b8843aac-3856-4c2d-80d4-d3f642065c75\") " pod="openstack/glance-default-external-api-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.989278 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"b8843aac-3856-4c2d-80d4-d3f642065c75\") " pod="openstack/glance-default-external-api-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.989397 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8843aac-3856-4c2d-80d4-d3f642065c75-config-data\") pod \"glance-default-external-api-0\" (UID: \"b8843aac-3856-4c2d-80d4-d3f642065c75\") " pod="openstack/glance-default-external-api-0" Dec 05 15:19:29 crc kubenswrapper[4840]: I1205 15:19:29.989438 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8843aac-3856-4c2d-80d4-d3f642065c75-scripts\") pod \"glance-default-external-api-0\" (UID: \"b8843aac-3856-4c2d-80d4-d3f642065c75\") " pod="openstack/glance-default-external-api-0" Dec 05 15:19:30 crc kubenswrapper[4840]: I1205 15:19:30.031828 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:19:30 crc kubenswrapper[4840]: I1205 15:19:30.092146 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwxpt\" (UniqueName: \"kubernetes.io/projected/b8843aac-3856-4c2d-80d4-d3f642065c75-kube-api-access-fwxpt\") pod \"glance-default-external-api-0\" (UID: \"b8843aac-3856-4c2d-80d4-d3f642065c75\") " pod="openstack/glance-default-external-api-0" Dec 05 15:19:30 crc kubenswrapper[4840]: I1205 15:19:30.092190 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"b8843aac-3856-4c2d-80d4-d3f642065c75\") " pod="openstack/glance-default-external-api-0" Dec 05 15:19:30 crc kubenswrapper[4840]: I1205 15:19:30.092248 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8843aac-3856-4c2d-80d4-d3f642065c75-config-data\") pod \"glance-default-external-api-0\" (UID: \"b8843aac-3856-4c2d-80d4-d3f642065c75\") " pod="openstack/glance-default-external-api-0" Dec 05 15:19:30 crc kubenswrapper[4840]: I1205 15:19:30.092270 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8843aac-3856-4c2d-80d4-d3f642065c75-scripts\") pod \"glance-default-external-api-0\" (UID: \"b8843aac-3856-4c2d-80d4-d3f642065c75\") " pod="openstack/glance-default-external-api-0" Dec 05 15:19:30 crc kubenswrapper[4840]: I1205 15:19:30.092323 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b8843aac-3856-4c2d-80d4-d3f642065c75-httpd-run\") pod \"glance-default-external-api-0\" (UID: 
\"b8843aac-3856-4c2d-80d4-d3f642065c75\") " pod="openstack/glance-default-external-api-0" Dec 05 15:19:30 crc kubenswrapper[4840]: I1205 15:19:30.092354 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8843aac-3856-4c2d-80d4-d3f642065c75-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b8843aac-3856-4c2d-80d4-d3f642065c75\") " pod="openstack/glance-default-external-api-0" Dec 05 15:19:30 crc kubenswrapper[4840]: I1205 15:19:30.092377 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8843aac-3856-4c2d-80d4-d3f642065c75-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b8843aac-3856-4c2d-80d4-d3f642065c75\") " pod="openstack/glance-default-external-api-0" Dec 05 15:19:30 crc kubenswrapper[4840]: I1205 15:19:30.092398 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8843aac-3856-4c2d-80d4-d3f642065c75-logs\") pod \"glance-default-external-api-0\" (UID: \"b8843aac-3856-4c2d-80d4-d3f642065c75\") " pod="openstack/glance-default-external-api-0" Dec 05 15:19:30 crc kubenswrapper[4840]: I1205 15:19:30.093546 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8843aac-3856-4c2d-80d4-d3f642065c75-logs\") pod \"glance-default-external-api-0\" (UID: \"b8843aac-3856-4c2d-80d4-d3f642065c75\") " pod="openstack/glance-default-external-api-0" Dec 05 15:19:30 crc kubenswrapper[4840]: I1205 15:19:30.095970 4840 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"b8843aac-3856-4c2d-80d4-d3f642065c75\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-external-api-0" Dec 05 15:19:30 crc kubenswrapper[4840]: I1205 15:19:30.096775 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b8843aac-3856-4c2d-80d4-d3f642065c75-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b8843aac-3856-4c2d-80d4-d3f642065c75\") " pod="openstack/glance-default-external-api-0" Dec 05 15:19:30 crc kubenswrapper[4840]: I1205 15:19:30.099695 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8843aac-3856-4c2d-80d4-d3f642065c75-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b8843aac-3856-4c2d-80d4-d3f642065c75\") " pod="openstack/glance-default-external-api-0" Dec 05 15:19:30 crc kubenswrapper[4840]: I1205 15:19:30.099997 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8843aac-3856-4c2d-80d4-d3f642065c75-scripts\") pod \"glance-default-external-api-0\" (UID: \"b8843aac-3856-4c2d-80d4-d3f642065c75\") " pod="openstack/glance-default-external-api-0" Dec 05 15:19:30 crc kubenswrapper[4840]: I1205 15:19:30.102960 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8843aac-3856-4c2d-80d4-d3f642065c75-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b8843aac-3856-4c2d-80d4-d3f642065c75\") " pod="openstack/glance-default-external-api-0" Dec 05 15:19:30 crc 
kubenswrapper[4840]: I1205 15:19:30.103653 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15d9aa0d-7948-40b8-a043-e6d36f2a5bb1" path="/var/lib/kubelet/pods/15d9aa0d-7948-40b8-a043-e6d36f2a5bb1/volumes" Dec 05 15:19:30 crc kubenswrapper[4840]: I1205 15:19:30.105967 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8843aac-3856-4c2d-80d4-d3f642065c75-config-data\") pod \"glance-default-external-api-0\" (UID: \"b8843aac-3856-4c2d-80d4-d3f642065c75\") " pod="openstack/glance-default-external-api-0" Dec 05 15:19:30 crc kubenswrapper[4840]: I1205 15:19:30.117326 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c46fc46-e681-4b04-8367-3a9f6e0283a6" path="/var/lib/kubelet/pods/3c46fc46-e681-4b04-8367-3a9f6e0283a6/volumes" Dec 05 15:19:30 crc kubenswrapper[4840]: I1205 15:19:30.118769 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwxpt\" (UniqueName: \"kubernetes.io/projected/b8843aac-3856-4c2d-80d4-d3f642065c75-kube-api-access-fwxpt\") pod \"glance-default-external-api-0\" (UID: \"b8843aac-3856-4c2d-80d4-d3f642065c75\") " pod="openstack/glance-default-external-api-0" Dec 05 15:19:30 crc kubenswrapper[4840]: I1205 15:19:30.122201 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9ce8593-a546-4c4f-b48d-6d2526aba0a0" path="/var/lib/kubelet/pods/b9ce8593-a546-4c4f-b48d-6d2526aba0a0/volumes" Dec 05 15:19:30 crc kubenswrapper[4840]: I1205 15:19:30.154502 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"b8843aac-3856-4c2d-80d4-d3f642065c75\") " pod="openstack/glance-default-external-api-0" Dec 05 15:19:30 crc kubenswrapper[4840]: I1205 15:19:30.299780 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 15:19:30 crc kubenswrapper[4840]: I1205 15:19:30.355039 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:19:30 crc kubenswrapper[4840]: W1205 15:19:30.362338 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod22cc58f6_58aa_45d8_ab38_630bcec0fcf3.slice/crio-7d925ba49dae9d976e7d193af126cfd4187f9f88790333ab181a559ce56b0844 WatchSource:0}: Error finding container 7d925ba49dae9d976e7d193af126cfd4187f9f88790333ab181a559ce56b0844: Status 404 returned error can't find the container with id 7d925ba49dae9d976e7d193af126cfd4187f9f88790333ab181a559ce56b0844 Dec 05 15:19:30 crc kubenswrapper[4840]: I1205 15:19:30.603015 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22cc58f6-58aa-45d8-ab38-630bcec0fcf3","Type":"ContainerStarted","Data":"7d925ba49dae9d976e7d193af126cfd4187f9f88790333ab181a559ce56b0844"} Dec 05 15:19:30 crc kubenswrapper[4840]: I1205 15:19:30.613217 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-jhpgl" event={"ID":"fd12815f-fbf2-4030-847f-d843ac9f5343","Type":"ContainerStarted","Data":"e7b3ec0ef34e6bede222aab53c9fa2311a84d8d0572aa3e06ecbdff5f66371c3"} Dec 05 15:19:31 crc kubenswrapper[4840]: I1205 15:19:31.257245 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 15:19:31 crc kubenswrapper[4840]: I1205 15:19:31.632612 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b8843aac-3856-4c2d-80d4-d3f642065c75","Type":"ContainerStarted","Data":"0ba1f26aded792c1588eb1ab8dbc6f58167c48a211f7cd95ec1ea7f8b296ca4f"} Dec 05 15:19:31 crc kubenswrapper[4840]: I1205 15:19:31.774778 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22cc58f6-58aa-45d8-ab38-630bcec0fcf3","Type":"ContainerStarted","Data":"b6a1e62247c35fbfe1fdc67716a5ef053edd591686c52a96fcecc48fac981ef4"} Dec 05 15:19:32 crc kubenswrapper[4840]: I1205 15:19:32.809756 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b8843aac-3856-4c2d-80d4-d3f642065c75","Type":"ContainerStarted","Data":"21ece70dffd64843f987d2cb396b85bfed8c74799aa12bfb6d11f06c77fd470e"} Dec 05 15:19:32 crc kubenswrapper[4840]: I1205 15:19:32.823357 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22cc58f6-58aa-45d8-ab38-630bcec0fcf3","Type":"ContainerStarted","Data":"ab4f6bb82bb15212be410d2c5cbae3cab18aa555445c5b27e53110e2e4bf3c43"} Dec 05 15:19:33 crc kubenswrapper[4840]: I1205 15:19:33.678717 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 15:19:33 crc kubenswrapper[4840]: I1205 15:19:33.680597 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 15:19:33 crc kubenswrapper[4840]: I1205 15:19:33.733744 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 15:19:33 crc kubenswrapper[4840]: I1205 15:19:33.780549 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 15:19:33 crc kubenswrapper[4840]: 
I1205 15:19:33.834926 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 15:19:33 crc kubenswrapper[4840]: I1205 15:19:33.835119 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 15:19:34 crc kubenswrapper[4840]: I1205 15:19:34.849103 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b8843aac-3856-4c2d-80d4-d3f642065c75","Type":"ContainerStarted","Data":"05a016d0fc85aa631a53d18c42e1a938e9470e44050b978d3599e7bcb5c70eae"} Dec 05 15:19:34 crc kubenswrapper[4840]: I1205 15:19:34.858997 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22cc58f6-58aa-45d8-ab38-630bcec0fcf3","Type":"ContainerStarted","Data":"7320f0e3af66ceb5d0862533e6bd52882c4b748a0ac56b3ddd6c836fd47e53d5"} Dec 05 15:19:34 crc kubenswrapper[4840]: I1205 15:19:34.880375 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.880338787 podStartE2EDuration="5.880338787s" podCreationTimestamp="2025-12-05 15:19:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:19:34.87120879 +0000 UTC m=+1253.212271414" watchObservedRunningTime="2025-12-05 15:19:34.880338787 +0000 UTC m=+1253.221401401" Dec 05 15:19:34 crc kubenswrapper[4840]: I1205 15:19:34.901809 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:19:35 crc kubenswrapper[4840]: I1205 15:19:35.501152 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="b2379ec8-f983-42df-9255-2a97b8589b6d" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.172:8776/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 15:19:37 crc kubenswrapper[4840]: I1205 15:19:37.089044 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22cc58f6-58aa-45d8-ab38-630bcec0fcf3","Type":"ContainerStarted","Data":"d28ae635ad83435bf5f471b3b847bc3ad3f2f925fe9df6f8e1a4b8defc3d5794"} Dec 05 15:19:37 crc kubenswrapper[4840]: I1205 15:19:37.090513 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="22cc58f6-58aa-45d8-ab38-630bcec0fcf3" containerName="ceilometer-central-agent" containerID="cri-o://b6a1e62247c35fbfe1fdc67716a5ef053edd591686c52a96fcecc48fac981ef4" gracePeriod=30 Dec 05 15:19:37 crc kubenswrapper[4840]: I1205 15:19:37.090647 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="22cc58f6-58aa-45d8-ab38-630bcec0fcf3" containerName="proxy-httpd" containerID="cri-o://d28ae635ad83435bf5f471b3b847bc3ad3f2f925fe9df6f8e1a4b8defc3d5794" gracePeriod=30 Dec 05 15:19:37 crc kubenswrapper[4840]: I1205 15:19:37.090648 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 15:19:37 crc kubenswrapper[4840]: I1205 15:19:37.090705 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="22cc58f6-58aa-45d8-ab38-630bcec0fcf3" containerName="sg-core" containerID="cri-o://7320f0e3af66ceb5d0862533e6bd52882c4b748a0ac56b3ddd6c836fd47e53d5" gracePeriod=30 Dec 05 15:19:37 crc 
kubenswrapper[4840]: I1205 15:19:37.090741 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="22cc58f6-58aa-45d8-ab38-630bcec0fcf3" containerName="ceilometer-notification-agent" containerID="cri-o://ab4f6bb82bb15212be410d2c5cbae3cab18aa555445c5b27e53110e2e4bf3c43" gracePeriod=30 Dec 05 15:19:37 crc kubenswrapper[4840]: I1205 15:19:37.124907 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.776208993 podStartE2EDuration="8.124884243s" podCreationTimestamp="2025-12-05 15:19:29 +0000 UTC" firstStartedPulling="2025-12-05 15:19:30.366446357 +0000 UTC m=+1248.707508971" lastFinishedPulling="2025-12-05 15:19:35.715121597 +0000 UTC m=+1254.056184221" observedRunningTime="2025-12-05 15:19:37.120503139 +0000 UTC m=+1255.461565753" watchObservedRunningTime="2025-12-05 15:19:37.124884243 +0000 UTC m=+1255.465946857" Dec 05 15:19:38 crc kubenswrapper[4840]: I1205 15:19:38.170087 4840 generic.go:334] "Generic (PLEG): container finished" podID="22cc58f6-58aa-45d8-ab38-630bcec0fcf3" containerID="d28ae635ad83435bf5f471b3b847bc3ad3f2f925fe9df6f8e1a4b8defc3d5794" exitCode=0 Dec 05 15:19:38 crc kubenswrapper[4840]: I1205 15:19:38.170493 4840 generic.go:334] "Generic (PLEG): container finished" podID="22cc58f6-58aa-45d8-ab38-630bcec0fcf3" containerID="7320f0e3af66ceb5d0862533e6bd52882c4b748a0ac56b3ddd6c836fd47e53d5" exitCode=2 Dec 05 15:19:38 crc kubenswrapper[4840]: I1205 15:19:38.170503 4840 generic.go:334] "Generic (PLEG): container finished" podID="22cc58f6-58aa-45d8-ab38-630bcec0fcf3" containerID="ab4f6bb82bb15212be410d2c5cbae3cab18aa555445c5b27e53110e2e4bf3c43" exitCode=0 Dec 05 15:19:38 crc kubenswrapper[4840]: I1205 15:19:38.170525 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22cc58f6-58aa-45d8-ab38-630bcec0fcf3","Type":"ContainerDied","Data":"d28ae635ad83435bf5f471b3b847bc3ad3f2f925fe9df6f8e1a4b8defc3d5794"} Dec 05 15:19:38 crc kubenswrapper[4840]: I1205 15:19:38.170559 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22cc58f6-58aa-45d8-ab38-630bcec0fcf3","Type":"ContainerDied","Data":"7320f0e3af66ceb5d0862533e6bd52882c4b748a0ac56b3ddd6c836fd47e53d5"} Dec 05 15:19:38 crc kubenswrapper[4840]: I1205 15:19:38.170572 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22cc58f6-58aa-45d8-ab38-630bcec0fcf3","Type":"ContainerDied","Data":"ab4f6bb82bb15212be410d2c5cbae3cab18aa555445c5b27e53110e2e4bf3c43"} Dec 05 15:19:38 crc kubenswrapper[4840]: I1205 15:19:38.193024 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 15:19:38 crc kubenswrapper[4840]: I1205 15:19:38.193359 4840 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 15:19:38 crc kubenswrapper[4840]: I1205 15:19:38.193714 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 15:19:40 crc kubenswrapper[4840]: I1205 15:19:40.302191 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 15:19:40 crc kubenswrapper[4840]: I1205 15:19:40.303781 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 15:19:40 crc kubenswrapper[4840]: I1205 15:19:40.382064 4840 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 15:19:40 crc kubenswrapper[4840]: I1205 15:19:40.391191 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 15:19:41 crc kubenswrapper[4840]: I1205 15:19:41.345294 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 15:19:41 crc kubenswrapper[4840]: I1205 15:19:41.345539 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 15:19:44 crc kubenswrapper[4840]: I1205 15:19:44.028491 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 15:19:44 crc kubenswrapper[4840]: I1205 15:19:44.028948 4840 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 15:19:44 crc kubenswrapper[4840]: I1205 15:19:44.120617 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 15:19:45 crc kubenswrapper[4840]: I1205 15:19:45.925958 4840 generic.go:334] "Generic (PLEG): container finished" podID="22cc58f6-58aa-45d8-ab38-630bcec0fcf3" containerID="b6a1e62247c35fbfe1fdc67716a5ef053edd591686c52a96fcecc48fac981ef4" exitCode=0 Dec 05 15:19:45 crc kubenswrapper[4840]: I1205 15:19:45.926314 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22cc58f6-58aa-45d8-ab38-630bcec0fcf3","Type":"ContainerDied","Data":"b6a1e62247c35fbfe1fdc67716a5ef053edd591686c52a96fcecc48fac981ef4"} Dec 05 15:19:47 crc kubenswrapper[4840]: I1205 15:19:47.918739 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:19:47 crc kubenswrapper[4840]: I1205 15:19:47.945814 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-jhpgl" event={"ID":"fd12815f-fbf2-4030-847f-d843ac9f5343","Type":"ContainerStarted","Data":"6b12bc79556bdc8de3058cc82a9e6df2f5c7ca0e31fa1eeccd6f92664993c2dd"} Dec 05 15:19:47 crc kubenswrapper[4840]: I1205 15:19:47.949369 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22cc58f6-58aa-45d8-ab38-630bcec0fcf3","Type":"ContainerDied","Data":"7d925ba49dae9d976e7d193af126cfd4187f9f88790333ab181a559ce56b0844"} Dec 05 15:19:47 crc kubenswrapper[4840]: I1205 15:19:47.949429 4840 scope.go:117] "RemoveContainer" containerID="d28ae635ad83435bf5f471b3b847bc3ad3f2f925fe9df6f8e1a4b8defc3d5794" Dec 05 15:19:47 crc kubenswrapper[4840]: I1205 15:19:47.949432 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:19:47 crc kubenswrapper[4840]: I1205 15:19:47.968790 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-jhpgl" podStartSLOduration=2.127065998 podStartE2EDuration="19.968772177s" podCreationTimestamp="2025-12-05 15:19:28 +0000 UTC" firstStartedPulling="2025-12-05 15:19:29.711684896 +0000 UTC m=+1248.052747510" lastFinishedPulling="2025-12-05 15:19:47.553391075 +0000 UTC m=+1265.894453689" observedRunningTime="2025-12-05 15:19:47.966565865 +0000 UTC m=+1266.307628479" watchObservedRunningTime="2025-12-05 15:19:47.968772177 +0000 UTC m=+1266.309834791" Dec 05 15:19:47 crc kubenswrapper[4840]: I1205 15:19:47.975381 4840 scope.go:117] "RemoveContainer" containerID="7320f0e3af66ceb5d0862533e6bd52882c4b748a0ac56b3ddd6c836fd47e53d5" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.048645 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-log-httpd\") pod \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.048943 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-config-data\") pod \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.049001 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmn5q\" (UniqueName: \"kubernetes.io/projected/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-kube-api-access-nmn5q\") pod \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.049095 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-combined-ca-bundle\") pod \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.049152 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-sg-core-conf-yaml\") pod \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.049219 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-scripts\") pod \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.049331 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-run-httpd\") pod \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\" (UID: \"22cc58f6-58aa-45d8-ab38-630bcec0fcf3\") " Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.049460 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-log-httpd" (OuterVolumeSpecName: 
"log-httpd") pod "22cc58f6-58aa-45d8-ab38-630bcec0fcf3" (UID: "22cc58f6-58aa-45d8-ab38-630bcec0fcf3"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.050159 4840 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.361132 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "22cc58f6-58aa-45d8-ab38-630bcec0fcf3" (UID: "22cc58f6-58aa-45d8-ab38-630bcec0fcf3"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.364770 4840 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.369644 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-scripts" (OuterVolumeSpecName: "scripts") pod "22cc58f6-58aa-45d8-ab38-630bcec0fcf3" (UID: "22cc58f6-58aa-45d8-ab38-630bcec0fcf3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.374746 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-kube-api-access-nmn5q" (OuterVolumeSpecName: "kube-api-access-nmn5q") pod "22cc58f6-58aa-45d8-ab38-630bcec0fcf3" (UID: "22cc58f6-58aa-45d8-ab38-630bcec0fcf3"). InnerVolumeSpecName "kube-api-access-nmn5q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.375288 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "22cc58f6-58aa-45d8-ab38-630bcec0fcf3" (UID: "22cc58f6-58aa-45d8-ab38-630bcec0fcf3"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.381233 4840 scope.go:117] "RemoveContainer" containerID="ab4f6bb82bb15212be410d2c5cbae3cab18aa555445c5b27e53110e2e4bf3c43" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.425114 4840 scope.go:117] "RemoveContainer" containerID="b6a1e62247c35fbfe1fdc67716a5ef053edd591686c52a96fcecc48fac981ef4" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.448236 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "22cc58f6-58aa-45d8-ab38-630bcec0fcf3" (UID: "22cc58f6-58aa-45d8-ab38-630bcec0fcf3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.466472 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmn5q\" (UniqueName: \"kubernetes.io/projected/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-kube-api-access-nmn5q\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.466520 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.466528 4840 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.466541 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.478639 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-config-data" (OuterVolumeSpecName: "config-data") pod "22cc58f6-58aa-45d8-ab38-630bcec0fcf3" (UID: "22cc58f6-58aa-45d8-ab38-630bcec0fcf3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.568452 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22cc58f6-58aa-45d8-ab38-630bcec0fcf3-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.614696 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.626757 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.651498 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:19:48 crc kubenswrapper[4840]: E1205 15:19:48.651927 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22cc58f6-58aa-45d8-ab38-630bcec0fcf3" containerName="ceilometer-notification-agent" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.651949 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="22cc58f6-58aa-45d8-ab38-630bcec0fcf3" containerName="ceilometer-notification-agent" Dec 05 15:19:48 crc kubenswrapper[4840]: E1205 15:19:48.651959 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22cc58f6-58aa-45d8-ab38-630bcec0fcf3" containerName="ceilometer-central-agent" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.651965 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="22cc58f6-58aa-45d8-ab38-630bcec0fcf3" containerName="ceilometer-central-agent" Dec 05 15:19:48 crc kubenswrapper[4840]: E1205 15:19:48.651989 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22cc58f6-58aa-45d8-ab38-630bcec0fcf3" containerName="proxy-httpd" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.651996 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="22cc58f6-58aa-45d8-ab38-630bcec0fcf3" containerName="proxy-httpd" Dec 05 15:19:48 crc kubenswrapper[4840]: E1205 15:19:48.652015 4840 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22cc58f6-58aa-45d8-ab38-630bcec0fcf3" containerName="sg-core" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.652020 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="22cc58f6-58aa-45d8-ab38-630bcec0fcf3" containerName="sg-core" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.652252 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="22cc58f6-58aa-45d8-ab38-630bcec0fcf3" containerName="proxy-httpd" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.652270 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="22cc58f6-58aa-45d8-ab38-630bcec0fcf3" containerName="sg-core" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.652292 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="22cc58f6-58aa-45d8-ab38-630bcec0fcf3" containerName="ceilometer-notification-agent" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.652304 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="22cc58f6-58aa-45d8-ab38-630bcec0fcf3" containerName="ceilometer-central-agent" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.654092 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.655915 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.656900 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.665839 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.714302 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dead0240-da38-4ccb-8b4b-b537e9732211-run-httpd\") pod \"ceilometer-0\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " pod="openstack/ceilometer-0" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.714360 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dead0240-da38-4ccb-8b4b-b537e9732211-log-httpd\") pod \"ceilometer-0\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " pod="openstack/ceilometer-0" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.714445 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dead0240-da38-4ccb-8b4b-b537e9732211-config-data\") pod \"ceilometer-0\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " pod="openstack/ceilometer-0" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.714612 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpvkj\" (UniqueName: \"kubernetes.io/projected/dead0240-da38-4ccb-8b4b-b537e9732211-kube-api-access-gpvkj\") pod \"ceilometer-0\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " pod="openstack/ceilometer-0" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.714668 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dead0240-da38-4ccb-8b4b-b537e9732211-sg-core-conf-yaml\") pod 
\"ceilometer-0\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " pod="openstack/ceilometer-0" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.714699 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dead0240-da38-4ccb-8b4b-b537e9732211-scripts\") pod \"ceilometer-0\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " pod="openstack/ceilometer-0" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.714738 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dead0240-da38-4ccb-8b4b-b537e9732211-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " pod="openstack/ceilometer-0" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.816920 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dead0240-da38-4ccb-8b4b-b537e9732211-run-httpd\") pod \"ceilometer-0\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " pod="openstack/ceilometer-0" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.817441 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dead0240-da38-4ccb-8b4b-b537e9732211-log-httpd\") pod \"ceilometer-0\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " pod="openstack/ceilometer-0" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.817392 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dead0240-da38-4ccb-8b4b-b537e9732211-run-httpd\") pod \"ceilometer-0\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " pod="openstack/ceilometer-0" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.817537 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dead0240-da38-4ccb-8b4b-b537e9732211-config-data\") pod \"ceilometer-0\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " pod="openstack/ceilometer-0" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.817920 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dead0240-da38-4ccb-8b4b-b537e9732211-log-httpd\") pod \"ceilometer-0\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " pod="openstack/ceilometer-0" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.818186 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpvkj\" (UniqueName: \"kubernetes.io/projected/dead0240-da38-4ccb-8b4b-b537e9732211-kube-api-access-gpvkj\") pod \"ceilometer-0\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " pod="openstack/ceilometer-0" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.818226 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dead0240-da38-4ccb-8b4b-b537e9732211-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " pod="openstack/ceilometer-0" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.818243 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dead0240-da38-4ccb-8b4b-b537e9732211-scripts\") pod \"ceilometer-0\" (UID: 
\"dead0240-da38-4ccb-8b4b-b537e9732211\") " pod="openstack/ceilometer-0" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.818300 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dead0240-da38-4ccb-8b4b-b537e9732211-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " pod="openstack/ceilometer-0" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.822472 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dead0240-da38-4ccb-8b4b-b537e9732211-config-data\") pod \"ceilometer-0\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " pod="openstack/ceilometer-0" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.822449 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dead0240-da38-4ccb-8b4b-b537e9732211-scripts\") pod \"ceilometer-0\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " pod="openstack/ceilometer-0" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.823035 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dead0240-da38-4ccb-8b4b-b537e9732211-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " pod="openstack/ceilometer-0" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.823214 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dead0240-da38-4ccb-8b4b-b537e9732211-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " pod="openstack/ceilometer-0" Dec 05 15:19:48 crc kubenswrapper[4840]: I1205 15:19:48.838142 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpvkj\" (UniqueName: \"kubernetes.io/projected/dead0240-da38-4ccb-8b4b-b537e9732211-kube-api-access-gpvkj\") pod \"ceilometer-0\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " pod="openstack/ceilometer-0" Dec 05 15:19:49 crc kubenswrapper[4840]: I1205 15:19:49.020005 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:19:49 crc kubenswrapper[4840]: I1205 15:19:49.729264 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:19:49 crc kubenswrapper[4840]: W1205 15:19:49.734593 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddead0240_da38_4ccb_8b4b_b537e9732211.slice/crio-e949f24908cb74fcaa2658658770c3beb0265b5af03f3b71470874ce6accff66 WatchSource:0}: Error finding container e949f24908cb74fcaa2658658770c3beb0265b5af03f3b71470874ce6accff66: Status 404 returned error can't find the container with id e949f24908cb74fcaa2658658770c3beb0265b5af03f3b71470874ce6accff66 Dec 05 15:19:49 crc kubenswrapper[4840]: I1205 15:19:49.737269 4840 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 15:19:49 crc kubenswrapper[4840]: I1205 15:19:49.973934 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dead0240-da38-4ccb-8b4b-b537e9732211","Type":"ContainerStarted","Data":"e949f24908cb74fcaa2658658770c3beb0265b5af03f3b71470874ce6accff66"} Dec 05 15:19:50 crc kubenswrapper[4840]: I1205 15:19:50.079794 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22cc58f6-58aa-45d8-ab38-630bcec0fcf3" path="/var/lib/kubelet/pods/22cc58f6-58aa-45d8-ab38-630bcec0fcf3/volumes" Dec 05 15:19:50 crc kubenswrapper[4840]: I1205 15:19:50.988414 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dead0240-da38-4ccb-8b4b-b537e9732211","Type":"ContainerStarted","Data":"1e8e5106eb3968a167e3fa20b1eb112465eaba8f7a484477ff6bfe4a1221d79b"} Dec 05 15:19:52 crc kubenswrapper[4840]: I1205 15:19:52.000682 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dead0240-da38-4ccb-8b4b-b537e9732211","Type":"ContainerStarted","Data":"0f5a6a0d22e9e95eddb898e846ae359d8a9abf871db3c735fa7ba96a22acde47"} Dec 05 15:19:53 crc kubenswrapper[4840]: I1205 15:19:53.011261 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dead0240-da38-4ccb-8b4b-b537e9732211","Type":"ContainerStarted","Data":"69715a44448838e58d40384572a51d5dc3e42fdfa73a771e9534fe54703ff654"} Dec 05 15:19:54 crc kubenswrapper[4840]: I1205 15:19:54.021676 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dead0240-da38-4ccb-8b4b-b537e9732211","Type":"ContainerStarted","Data":"6244aa681a2a038c75b79a1fb6870725d983749abe8e03a9849ccaefa222c7b0"} Dec 05 15:19:54 crc kubenswrapper[4840]: I1205 15:19:54.022305 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 15:19:54 crc kubenswrapper[4840]: I1205 15:19:54.054297 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.314232604 podStartE2EDuration="6.05428139s" podCreationTimestamp="2025-12-05 15:19:48 +0000 UTC" firstStartedPulling="2025-12-05 15:19:49.736979214 +0000 UTC m=+1268.078041828" lastFinishedPulling="2025-12-05 15:19:53.477028 +0000 UTC m=+1271.818090614" observedRunningTime="2025-12-05 15:19:54.045679978 +0000 UTC m=+1272.386742592" watchObservedRunningTime="2025-12-05 15:19:54.05428139 +0000 UTC m=+1272.395344004" Dec 05 15:20:01 crc kubenswrapper[4840]: I1205 15:20:01.085854 4840 generic.go:334] "Generic (PLEG): container finished" 
podID="fd12815f-fbf2-4030-847f-d843ac9f5343" containerID="6b12bc79556bdc8de3058cc82a9e6df2f5c7ca0e31fa1eeccd6f92664993c2dd" exitCode=0 Dec 05 15:20:01 crc kubenswrapper[4840]: I1205 15:20:01.085923 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-jhpgl" event={"ID":"fd12815f-fbf2-4030-847f-d843ac9f5343","Type":"ContainerDied","Data":"6b12bc79556bdc8de3058cc82a9e6df2f5c7ca0e31fa1eeccd6f92664993c2dd"} Dec 05 15:20:01 crc kubenswrapper[4840]: I1205 15:20:01.325274 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:20:01 crc kubenswrapper[4840]: I1205 15:20:01.325592 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dead0240-da38-4ccb-8b4b-b537e9732211" containerName="ceilometer-central-agent" containerID="cri-o://1e8e5106eb3968a167e3fa20b1eb112465eaba8f7a484477ff6bfe4a1221d79b" gracePeriod=30 Dec 05 15:20:01 crc kubenswrapper[4840]: I1205 15:20:01.325678 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dead0240-da38-4ccb-8b4b-b537e9732211" containerName="proxy-httpd" containerID="cri-o://6244aa681a2a038c75b79a1fb6870725d983749abe8e03a9849ccaefa222c7b0" gracePeriod=30 Dec 05 15:20:01 crc kubenswrapper[4840]: I1205 15:20:01.325752 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dead0240-da38-4ccb-8b4b-b537e9732211" containerName="ceilometer-notification-agent" containerID="cri-o://0f5a6a0d22e9e95eddb898e846ae359d8a9abf871db3c735fa7ba96a22acde47" gracePeriod=30 Dec 05 15:20:01 crc kubenswrapper[4840]: I1205 15:20:01.325751 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dead0240-da38-4ccb-8b4b-b537e9732211" containerName="sg-core" containerID="cri-o://69715a44448838e58d40384572a51d5dc3e42fdfa73a771e9534fe54703ff654" gracePeriod=30 Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.227895 4840 generic.go:334] "Generic (PLEG): container finished" podID="dead0240-da38-4ccb-8b4b-b537e9732211" containerID="6244aa681a2a038c75b79a1fb6870725d983749abe8e03a9849ccaefa222c7b0" exitCode=0 Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.228200 4840 generic.go:334] "Generic (PLEG): container finished" podID="dead0240-da38-4ccb-8b4b-b537e9732211" containerID="69715a44448838e58d40384572a51d5dc3e42fdfa73a771e9534fe54703ff654" exitCode=2 Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.228213 4840 generic.go:334] "Generic (PLEG): container finished" podID="dead0240-da38-4ccb-8b4b-b537e9732211" containerID="1e8e5106eb3968a167e3fa20b1eb112465eaba8f7a484477ff6bfe4a1221d79b" exitCode=0 Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.228442 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dead0240-da38-4ccb-8b4b-b537e9732211","Type":"ContainerDied","Data":"6244aa681a2a038c75b79a1fb6870725d983749abe8e03a9849ccaefa222c7b0"} Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.228478 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dead0240-da38-4ccb-8b4b-b537e9732211","Type":"ContainerDied","Data":"69715a44448838e58d40384572a51d5dc3e42fdfa73a771e9534fe54703ff654"} Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.228491 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"dead0240-da38-4ccb-8b4b-b537e9732211","Type":"ContainerDied","Data":"1e8e5106eb3968a167e3fa20b1eb112465eaba8f7a484477ff6bfe4a1221d79b"} Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.570918 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.685443 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-jhpgl" Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.702428 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dead0240-da38-4ccb-8b4b-b537e9732211-run-httpd\") pod \"dead0240-da38-4ccb-8b4b-b537e9732211\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.702818 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dead0240-da38-4ccb-8b4b-b537e9732211-sg-core-conf-yaml\") pod \"dead0240-da38-4ccb-8b4b-b537e9732211\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.702945 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dead0240-da38-4ccb-8b4b-b537e9732211-combined-ca-bundle\") pod \"dead0240-da38-4ccb-8b4b-b537e9732211\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.703101 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dead0240-da38-4ccb-8b4b-b537e9732211-log-httpd\") pod \"dead0240-da38-4ccb-8b4b-b537e9732211\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.703224 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dead0240-da38-4ccb-8b4b-b537e9732211-scripts\") pod \"dead0240-da38-4ccb-8b4b-b537e9732211\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.703338 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dead0240-da38-4ccb-8b4b-b537e9732211-config-data\") pod \"dead0240-da38-4ccb-8b4b-b537e9732211\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.703430 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gpvkj\" (UniqueName: \"kubernetes.io/projected/dead0240-da38-4ccb-8b4b-b537e9732211-kube-api-access-gpvkj\") pod \"dead0240-da38-4ccb-8b4b-b537e9732211\" (UID: \"dead0240-da38-4ccb-8b4b-b537e9732211\") " Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.712146 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dead0240-da38-4ccb-8b4b-b537e9732211-kube-api-access-gpvkj" (OuterVolumeSpecName: "kube-api-access-gpvkj") pod "dead0240-da38-4ccb-8b4b-b537e9732211" (UID: "dead0240-da38-4ccb-8b4b-b537e9732211"). InnerVolumeSpecName "kube-api-access-gpvkj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.713283 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dead0240-da38-4ccb-8b4b-b537e9732211-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "dead0240-da38-4ccb-8b4b-b537e9732211" (UID: "dead0240-da38-4ccb-8b4b-b537e9732211"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.713384 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dead0240-da38-4ccb-8b4b-b537e9732211-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "dead0240-da38-4ccb-8b4b-b537e9732211" (UID: "dead0240-da38-4ccb-8b4b-b537e9732211"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.717713 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dead0240-da38-4ccb-8b4b-b537e9732211-scripts" (OuterVolumeSpecName: "scripts") pod "dead0240-da38-4ccb-8b4b-b537e9732211" (UID: "dead0240-da38-4ccb-8b4b-b537e9732211"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.742705 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dead0240-da38-4ccb-8b4b-b537e9732211-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "dead0240-da38-4ccb-8b4b-b537e9732211" (UID: "dead0240-da38-4ccb-8b4b-b537e9732211"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.797709 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dead0240-da38-4ccb-8b4b-b537e9732211-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dead0240-da38-4ccb-8b4b-b537e9732211" (UID: "dead0240-da38-4ccb-8b4b-b537e9732211"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.804947 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd12815f-fbf2-4030-847f-d843ac9f5343-combined-ca-bundle\") pod \"fd12815f-fbf2-4030-847f-d843ac9f5343\" (UID: \"fd12815f-fbf2-4030-847f-d843ac9f5343\") " Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.805098 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd12815f-fbf2-4030-847f-d843ac9f5343-scripts\") pod \"fd12815f-fbf2-4030-847f-d843ac9f5343\" (UID: \"fd12815f-fbf2-4030-847f-d843ac9f5343\") " Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.805170 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n4rp8\" (UniqueName: \"kubernetes.io/projected/fd12815f-fbf2-4030-847f-d843ac9f5343-kube-api-access-n4rp8\") pod \"fd12815f-fbf2-4030-847f-d843ac9f5343\" (UID: \"fd12815f-fbf2-4030-847f-d843ac9f5343\") " Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.805209 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd12815f-fbf2-4030-847f-d843ac9f5343-config-data\") pod \"fd12815f-fbf2-4030-847f-d843ac9f5343\" (UID: \"fd12815f-fbf2-4030-847f-d843ac9f5343\") " Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.805582 4840 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dead0240-da38-4ccb-8b4b-b537e9732211-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.805598 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dead0240-da38-4ccb-8b4b-b537e9732211-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.805608 4840 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dead0240-da38-4ccb-8b4b-b537e9732211-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.805617 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dead0240-da38-4ccb-8b4b-b537e9732211-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.805625 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gpvkj\" (UniqueName: \"kubernetes.io/projected/dead0240-da38-4ccb-8b4b-b537e9732211-kube-api-access-gpvkj\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.805634 4840 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dead0240-da38-4ccb-8b4b-b537e9732211-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.808611 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd12815f-fbf2-4030-847f-d843ac9f5343-kube-api-access-n4rp8" (OuterVolumeSpecName: "kube-api-access-n4rp8") pod "fd12815f-fbf2-4030-847f-d843ac9f5343" (UID: "fd12815f-fbf2-4030-847f-d843ac9f5343"). InnerVolumeSpecName "kube-api-access-n4rp8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.810475 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd12815f-fbf2-4030-847f-d843ac9f5343-scripts" (OuterVolumeSpecName: "scripts") pod "fd12815f-fbf2-4030-847f-d843ac9f5343" (UID: "fd12815f-fbf2-4030-847f-d843ac9f5343"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.823306 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dead0240-da38-4ccb-8b4b-b537e9732211-config-data" (OuterVolumeSpecName: "config-data") pod "dead0240-da38-4ccb-8b4b-b537e9732211" (UID: "dead0240-da38-4ccb-8b4b-b537e9732211"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.843471 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd12815f-fbf2-4030-847f-d843ac9f5343-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fd12815f-fbf2-4030-847f-d843ac9f5343" (UID: "fd12815f-fbf2-4030-847f-d843ac9f5343"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.843845 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd12815f-fbf2-4030-847f-d843ac9f5343-config-data" (OuterVolumeSpecName: "config-data") pod "fd12815f-fbf2-4030-847f-d843ac9f5343" (UID: "fd12815f-fbf2-4030-847f-d843ac9f5343"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.907774 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n4rp8\" (UniqueName: \"kubernetes.io/projected/fd12815f-fbf2-4030-847f-d843ac9f5343-kube-api-access-n4rp8\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.907811 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd12815f-fbf2-4030-847f-d843ac9f5343-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.907822 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dead0240-da38-4ccb-8b4b-b537e9732211-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.907831 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd12815f-fbf2-4030-847f-d843ac9f5343-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:02 crc kubenswrapper[4840]: I1205 15:20:02.907841 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd12815f-fbf2-4030-847f-d843ac9f5343-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.251199 4840 generic.go:334] "Generic (PLEG): container finished" podID="dead0240-da38-4ccb-8b4b-b537e9732211" containerID="0f5a6a0d22e9e95eddb898e846ae359d8a9abf871db3c735fa7ba96a22acde47" exitCode=0 Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.251362 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"dead0240-da38-4ccb-8b4b-b537e9732211","Type":"ContainerDied","Data":"0f5a6a0d22e9e95eddb898e846ae359d8a9abf871db3c735fa7ba96a22acde47"} Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.252675 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dead0240-da38-4ccb-8b4b-b537e9732211","Type":"ContainerDied","Data":"e949f24908cb74fcaa2658658770c3beb0265b5af03f3b71470874ce6accff66"} Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.251463 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.252757 4840 scope.go:117] "RemoveContainer" containerID="6244aa681a2a038c75b79a1fb6870725d983749abe8e03a9849ccaefa222c7b0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.255283 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-jhpgl" event={"ID":"fd12815f-fbf2-4030-847f-d843ac9f5343","Type":"ContainerDied","Data":"e7b3ec0ef34e6bede222aab53c9fa2311a84d8d0572aa3e06ecbdff5f66371c3"} Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.255325 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e7b3ec0ef34e6bede222aab53c9fa2311a84d8d0572aa3e06ecbdff5f66371c3" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.255400 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-jhpgl" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.308042 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.318677 4840 scope.go:117] "RemoveContainer" containerID="69715a44448838e58d40384572a51d5dc3e42fdfa73a771e9534fe54703ff654" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.336980 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.354923 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 15:20:03 crc kubenswrapper[4840]: E1205 15:20:03.355391 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dead0240-da38-4ccb-8b4b-b537e9732211" containerName="proxy-httpd" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.355410 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="dead0240-da38-4ccb-8b4b-b537e9732211" containerName="proxy-httpd" Dec 05 15:20:03 crc kubenswrapper[4840]: E1205 15:20:03.355431 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dead0240-da38-4ccb-8b4b-b537e9732211" containerName="ceilometer-notification-agent" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.355438 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="dead0240-da38-4ccb-8b4b-b537e9732211" containerName="ceilometer-notification-agent" Dec 05 15:20:03 crc kubenswrapper[4840]: E1205 15:20:03.355456 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dead0240-da38-4ccb-8b4b-b537e9732211" containerName="sg-core" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.355464 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="dead0240-da38-4ccb-8b4b-b537e9732211" containerName="sg-core" Dec 05 15:20:03 crc kubenswrapper[4840]: E1205 15:20:03.355481 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dead0240-da38-4ccb-8b4b-b537e9732211" 
containerName="ceilometer-central-agent" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.355489 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="dead0240-da38-4ccb-8b4b-b537e9732211" containerName="ceilometer-central-agent" Dec 05 15:20:03 crc kubenswrapper[4840]: E1205 15:20:03.355503 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd12815f-fbf2-4030-847f-d843ac9f5343" containerName="nova-cell0-conductor-db-sync" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.355510 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd12815f-fbf2-4030-847f-d843ac9f5343" containerName="nova-cell0-conductor-db-sync" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.355713 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="dead0240-da38-4ccb-8b4b-b537e9732211" containerName="sg-core" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.355736 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd12815f-fbf2-4030-847f-d843ac9f5343" containerName="nova-cell0-conductor-db-sync" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.355752 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="dead0240-da38-4ccb-8b4b-b537e9732211" containerName="ceilometer-notification-agent" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.355769 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="dead0240-da38-4ccb-8b4b-b537e9732211" containerName="ceilometer-central-agent" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.355775 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="dead0240-da38-4ccb-8b4b-b537e9732211" containerName="proxy-httpd" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.356493 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.362930 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-xf7fq" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.363829 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.379150 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.388118 4840 scope.go:117] "RemoveContainer" containerID="0f5a6a0d22e9e95eddb898e846ae359d8a9abf871db3c735fa7ba96a22acde47" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.403880 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.405918 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.407975 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.415235 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.430950 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.478346 4840 scope.go:117] "RemoveContainer" containerID="1e8e5106eb3968a167e3fa20b1eb112465eaba8f7a484477ff6bfe4a1221d79b" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.520371 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b99404d9-830e-4e41-ac6d-74f02a5bc179-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " pod="openstack/ceilometer-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.520708 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b99404d9-830e-4e41-ac6d-74f02a5bc179-scripts\") pod \"ceilometer-0\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " pod="openstack/ceilometer-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.520749 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c921bef-fe53-4e64-bf34-1faa504c8a15-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"5c921bef-fe53-4e64-bf34-1faa504c8a15\") " pod="openstack/nova-cell0-conductor-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.520775 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b99404d9-830e-4e41-ac6d-74f02a5bc179-log-httpd\") pod \"ceilometer-0\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " pod="openstack/ceilometer-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.520822 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jz9j8\" (UniqueName: \"kubernetes.io/projected/5c921bef-fe53-4e64-bf34-1faa504c8a15-kube-api-access-jz9j8\") pod \"nova-cell0-conductor-0\" (UID: \"5c921bef-fe53-4e64-bf34-1faa504c8a15\") " pod="openstack/nova-cell0-conductor-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.520854 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2cxdq\" (UniqueName: \"kubernetes.io/projected/b99404d9-830e-4e41-ac6d-74f02a5bc179-kube-api-access-2cxdq\") pod \"ceilometer-0\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " pod="openstack/ceilometer-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.520901 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b99404d9-830e-4e41-ac6d-74f02a5bc179-run-httpd\") pod \"ceilometer-0\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " pod="openstack/ceilometer-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.520921 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/b99404d9-830e-4e41-ac6d-74f02a5bc179-config-data\") pod \"ceilometer-0\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " pod="openstack/ceilometer-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.520939 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b99404d9-830e-4e41-ac6d-74f02a5bc179-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " pod="openstack/ceilometer-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.520964 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c921bef-fe53-4e64-bf34-1faa504c8a15-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"5c921bef-fe53-4e64-bf34-1faa504c8a15\") " pod="openstack/nova-cell0-conductor-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.541221 4840 scope.go:117] "RemoveContainer" containerID="6244aa681a2a038c75b79a1fb6870725d983749abe8e03a9849ccaefa222c7b0" Dec 05 15:20:03 crc kubenswrapper[4840]: E1205 15:20:03.543250 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6244aa681a2a038c75b79a1fb6870725d983749abe8e03a9849ccaefa222c7b0\": container with ID starting with 6244aa681a2a038c75b79a1fb6870725d983749abe8e03a9849ccaefa222c7b0 not found: ID does not exist" containerID="6244aa681a2a038c75b79a1fb6870725d983749abe8e03a9849ccaefa222c7b0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.543288 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6244aa681a2a038c75b79a1fb6870725d983749abe8e03a9849ccaefa222c7b0"} err="failed to get container status \"6244aa681a2a038c75b79a1fb6870725d983749abe8e03a9849ccaefa222c7b0\": rpc error: code = NotFound desc = could not find container \"6244aa681a2a038c75b79a1fb6870725d983749abe8e03a9849ccaefa222c7b0\": container with ID starting with 6244aa681a2a038c75b79a1fb6870725d983749abe8e03a9849ccaefa222c7b0 not found: ID does not exist" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.543316 4840 scope.go:117] "RemoveContainer" containerID="69715a44448838e58d40384572a51d5dc3e42fdfa73a771e9534fe54703ff654" Dec 05 15:20:03 crc kubenswrapper[4840]: E1205 15:20:03.544049 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69715a44448838e58d40384572a51d5dc3e42fdfa73a771e9534fe54703ff654\": container with ID starting with 69715a44448838e58d40384572a51d5dc3e42fdfa73a771e9534fe54703ff654 not found: ID does not exist" containerID="69715a44448838e58d40384572a51d5dc3e42fdfa73a771e9534fe54703ff654" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.544082 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69715a44448838e58d40384572a51d5dc3e42fdfa73a771e9534fe54703ff654"} err="failed to get container status \"69715a44448838e58d40384572a51d5dc3e42fdfa73a771e9534fe54703ff654\": rpc error: code = NotFound desc = could not find container \"69715a44448838e58d40384572a51d5dc3e42fdfa73a771e9534fe54703ff654\": container with ID starting with 69715a44448838e58d40384572a51d5dc3e42fdfa73a771e9534fe54703ff654 not found: ID does not exist" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.544101 4840 scope.go:117] "RemoveContainer" 
containerID="0f5a6a0d22e9e95eddb898e846ae359d8a9abf871db3c735fa7ba96a22acde47" Dec 05 15:20:03 crc kubenswrapper[4840]: E1205 15:20:03.544425 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f5a6a0d22e9e95eddb898e846ae359d8a9abf871db3c735fa7ba96a22acde47\": container with ID starting with 0f5a6a0d22e9e95eddb898e846ae359d8a9abf871db3c735fa7ba96a22acde47 not found: ID does not exist" containerID="0f5a6a0d22e9e95eddb898e846ae359d8a9abf871db3c735fa7ba96a22acde47" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.544455 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f5a6a0d22e9e95eddb898e846ae359d8a9abf871db3c735fa7ba96a22acde47"} err="failed to get container status \"0f5a6a0d22e9e95eddb898e846ae359d8a9abf871db3c735fa7ba96a22acde47\": rpc error: code = NotFound desc = could not find container \"0f5a6a0d22e9e95eddb898e846ae359d8a9abf871db3c735fa7ba96a22acde47\": container with ID starting with 0f5a6a0d22e9e95eddb898e846ae359d8a9abf871db3c735fa7ba96a22acde47 not found: ID does not exist" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.544472 4840 scope.go:117] "RemoveContainer" containerID="1e8e5106eb3968a167e3fa20b1eb112465eaba8f7a484477ff6bfe4a1221d79b" Dec 05 15:20:03 crc kubenswrapper[4840]: E1205 15:20:03.544735 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e8e5106eb3968a167e3fa20b1eb112465eaba8f7a484477ff6bfe4a1221d79b\": container with ID starting with 1e8e5106eb3968a167e3fa20b1eb112465eaba8f7a484477ff6bfe4a1221d79b not found: ID does not exist" containerID="1e8e5106eb3968a167e3fa20b1eb112465eaba8f7a484477ff6bfe4a1221d79b" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.544771 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e8e5106eb3968a167e3fa20b1eb112465eaba8f7a484477ff6bfe4a1221d79b"} err="failed to get container status \"1e8e5106eb3968a167e3fa20b1eb112465eaba8f7a484477ff6bfe4a1221d79b\": rpc error: code = NotFound desc = could not find container \"1e8e5106eb3968a167e3fa20b1eb112465eaba8f7a484477ff6bfe4a1221d79b\": container with ID starting with 1e8e5106eb3968a167e3fa20b1eb112465eaba8f7a484477ff6bfe4a1221d79b not found: ID does not exist" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.622636 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c921bef-fe53-4e64-bf34-1faa504c8a15-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"5c921bef-fe53-4e64-bf34-1faa504c8a15\") " pod="openstack/nova-cell0-conductor-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.622695 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b99404d9-830e-4e41-ac6d-74f02a5bc179-log-httpd\") pod \"ceilometer-0\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " pod="openstack/ceilometer-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.622770 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jz9j8\" (UniqueName: \"kubernetes.io/projected/5c921bef-fe53-4e64-bf34-1faa504c8a15-kube-api-access-jz9j8\") pod \"nova-cell0-conductor-0\" (UID: \"5c921bef-fe53-4e64-bf34-1faa504c8a15\") " pod="openstack/nova-cell0-conductor-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.622813 4840 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2cxdq\" (UniqueName: \"kubernetes.io/projected/b99404d9-830e-4e41-ac6d-74f02a5bc179-kube-api-access-2cxdq\") pod \"ceilometer-0\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " pod="openstack/ceilometer-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.622851 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b99404d9-830e-4e41-ac6d-74f02a5bc179-run-httpd\") pod \"ceilometer-0\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " pod="openstack/ceilometer-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.622904 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b99404d9-830e-4e41-ac6d-74f02a5bc179-config-data\") pod \"ceilometer-0\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " pod="openstack/ceilometer-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.622927 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b99404d9-830e-4e41-ac6d-74f02a5bc179-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " pod="openstack/ceilometer-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.622969 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c921bef-fe53-4e64-bf34-1faa504c8a15-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"5c921bef-fe53-4e64-bf34-1faa504c8a15\") " pod="openstack/nova-cell0-conductor-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.623036 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b99404d9-830e-4e41-ac6d-74f02a5bc179-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " pod="openstack/ceilometer-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.623070 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b99404d9-830e-4e41-ac6d-74f02a5bc179-scripts\") pod \"ceilometer-0\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " pod="openstack/ceilometer-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.623706 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b99404d9-830e-4e41-ac6d-74f02a5bc179-run-httpd\") pod \"ceilometer-0\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " pod="openstack/ceilometer-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.623854 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b99404d9-830e-4e41-ac6d-74f02a5bc179-log-httpd\") pod \"ceilometer-0\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " pod="openstack/ceilometer-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.629045 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c921bef-fe53-4e64-bf34-1faa504c8a15-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"5c921bef-fe53-4e64-bf34-1faa504c8a15\") " pod="openstack/nova-cell0-conductor-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.629084 4840 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b99404d9-830e-4e41-ac6d-74f02a5bc179-config-data\") pod \"ceilometer-0\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " pod="openstack/ceilometer-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.629189 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b99404d9-830e-4e41-ac6d-74f02a5bc179-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " pod="openstack/ceilometer-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.629504 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b99404d9-830e-4e41-ac6d-74f02a5bc179-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " pod="openstack/ceilometer-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.630608 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c921bef-fe53-4e64-bf34-1faa504c8a15-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"5c921bef-fe53-4e64-bf34-1faa504c8a15\") " pod="openstack/nova-cell0-conductor-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.637527 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b99404d9-830e-4e41-ac6d-74f02a5bc179-scripts\") pod \"ceilometer-0\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " pod="openstack/ceilometer-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.640526 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2cxdq\" (UniqueName: \"kubernetes.io/projected/b99404d9-830e-4e41-ac6d-74f02a5bc179-kube-api-access-2cxdq\") pod \"ceilometer-0\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " pod="openstack/ceilometer-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.641609 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jz9j8\" (UniqueName: \"kubernetes.io/projected/5c921bef-fe53-4e64-bf34-1faa504c8a15-kube-api-access-jz9j8\") pod \"nova-cell0-conductor-0\" (UID: \"5c921bef-fe53-4e64-bf34-1faa504c8a15\") " pod="openstack/nova-cell0-conductor-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.683153 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 15:20:03 crc kubenswrapper[4840]: I1205 15:20:03.834244 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:20:04 crc kubenswrapper[4840]: I1205 15:20:04.084051 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dead0240-da38-4ccb-8b4b-b537e9732211" path="/var/lib/kubelet/pods/dead0240-da38-4ccb-8b4b-b537e9732211/volumes" Dec 05 15:20:04 crc kubenswrapper[4840]: W1205 15:20:04.224828 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c921bef_fe53_4e64_bf34_1faa504c8a15.slice/crio-1af50554bb411b4b3facb46d2f42b0badb389de8deb36fc3258f64f0be567357 WatchSource:0}: Error finding container 1af50554bb411b4b3facb46d2f42b0badb389de8deb36fc3258f64f0be567357: Status 404 returned error can't find the container with id 1af50554bb411b4b3facb46d2f42b0badb389de8deb36fc3258f64f0be567357 Dec 05 15:20:04 crc kubenswrapper[4840]: I1205 15:20:04.228248 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 15:20:04 crc kubenswrapper[4840]: I1205 15:20:04.267761 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"5c921bef-fe53-4e64-bf34-1faa504c8a15","Type":"ContainerStarted","Data":"1af50554bb411b4b3facb46d2f42b0badb389de8deb36fc3258f64f0be567357"} Dec 05 15:20:04 crc kubenswrapper[4840]: I1205 15:20:04.313387 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:20:05 crc kubenswrapper[4840]: I1205 15:20:05.290756 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b99404d9-830e-4e41-ac6d-74f02a5bc179","Type":"ContainerStarted","Data":"17981c646652986a61322d0aab2c744ffd0c1e586fd7d31e1b8e2276b9997af9"} Dec 05 15:20:05 crc kubenswrapper[4840]: I1205 15:20:05.290807 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b99404d9-830e-4e41-ac6d-74f02a5bc179","Type":"ContainerStarted","Data":"3552eb6a4ea4154f720d8ce3b98a20ef5456989f69b6de3c78518b57db2aaf2e"} Dec 05 15:20:05 crc kubenswrapper[4840]: I1205 15:20:05.292385 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"5c921bef-fe53-4e64-bf34-1faa504c8a15","Type":"ContainerStarted","Data":"dada9f0e2c2f642921e510d02d1fc9a48934c56a36a252248dfd1b95ba27dfb4"} Dec 05 15:20:05 crc kubenswrapper[4840]: I1205 15:20:05.293729 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 05 15:20:06 crc kubenswrapper[4840]: I1205 15:20:06.311855 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b99404d9-830e-4e41-ac6d-74f02a5bc179","Type":"ContainerStarted","Data":"e884e3abfa3cfbfb4e8734abec44b9b560571159e4fc9edec8140d2e69cb1496"} Dec 05 15:20:06 crc kubenswrapper[4840]: I1205 15:20:06.313185 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b99404d9-830e-4e41-ac6d-74f02a5bc179","Type":"ContainerStarted","Data":"bfd157ab1d04c540a4b5b08232ceaf982a8bc8589ba3dc6d1428a53e63816b07"} Dec 05 15:20:08 crc kubenswrapper[4840]: I1205 15:20:08.340203 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b99404d9-830e-4e41-ac6d-74f02a5bc179","Type":"ContainerStarted","Data":"4dee0cacc631e502121c83562826b14812b75ce060925f64005d0bbe5d8b94e3"} Dec 05 15:20:08 crc kubenswrapper[4840]: I1205 15:20:08.340788 4840 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 15:20:08 crc kubenswrapper[4840]: I1205 15:20:08.361984 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.307140797 podStartE2EDuration="5.361959683s" podCreationTimestamp="2025-12-05 15:20:03 +0000 UTC" firstStartedPulling="2025-12-05 15:20:04.324650689 +0000 UTC m=+1282.665713313" lastFinishedPulling="2025-12-05 15:20:07.379469585 +0000 UTC m=+1285.720532199" observedRunningTime="2025-12-05 15:20:08.358059643 +0000 UTC m=+1286.699122267" watchObservedRunningTime="2025-12-05 15:20:08.361959683 +0000 UTC m=+1286.703022297" Dec 05 15:20:08 crc kubenswrapper[4840]: I1205 15:20:08.369613 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=5.369591708 podStartE2EDuration="5.369591708s" podCreationTimestamp="2025-12-05 15:20:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:20:05.316019837 +0000 UTC m=+1283.657082451" watchObservedRunningTime="2025-12-05 15:20:08.369591708 +0000 UTC m=+1286.710654322" Dec 05 15:20:13 crc kubenswrapper[4840]: I1205 15:20:13.711721 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.183254 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-mbqt2"] Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.184653 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-mbqt2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.188200 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.188412 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.202901 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-mbqt2"] Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.376231 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.381610 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.386682 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlcmn\" (UniqueName: \"kubernetes.io/projected/8f56cb32-aedc-48db-bc49-99c6d668ce1b-kube-api-access-hlcmn\") pod \"nova-cell0-cell-mapping-mbqt2\" (UID: \"8f56cb32-aedc-48db-bc49-99c6d668ce1b\") " pod="openstack/nova-cell0-cell-mapping-mbqt2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.386777 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f56cb32-aedc-48db-bc49-99c6d668ce1b-config-data\") pod \"nova-cell0-cell-mapping-mbqt2\" (UID: \"8f56cb32-aedc-48db-bc49-99c6d668ce1b\") " pod="openstack/nova-cell0-cell-mapping-mbqt2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.386838 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f56cb32-aedc-48db-bc49-99c6d668ce1b-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-mbqt2\" (UID: \"8f56cb32-aedc-48db-bc49-99c6d668ce1b\") " pod="openstack/nova-cell0-cell-mapping-mbqt2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.386880 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f56cb32-aedc-48db-bc49-99c6d668ce1b-scripts\") pod \"nova-cell0-cell-mapping-mbqt2\" (UID: \"8f56cb32-aedc-48db-bc49-99c6d668ce1b\") " pod="openstack/nova-cell0-cell-mapping-mbqt2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.393170 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.431967 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.467348 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.469417 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.482102 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.496697 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/037d1001-d8b7-453e-bc9b-d4324e553154-logs\") pod \"nova-api-0\" (UID: \"037d1001-d8b7-453e-bc9b-d4324e553154\") " pod="openstack/nova-api-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.496796 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqr58\" (UniqueName: \"kubernetes.io/projected/037d1001-d8b7-453e-bc9b-d4324e553154-kube-api-access-cqr58\") pod \"nova-api-0\" (UID: \"037d1001-d8b7-453e-bc9b-d4324e553154\") " pod="openstack/nova-api-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.496935 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/037d1001-d8b7-453e-bc9b-d4324e553154-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"037d1001-d8b7-453e-bc9b-d4324e553154\") " pod="openstack/nova-api-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.522127 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlcmn\" (UniqueName: \"kubernetes.io/projected/8f56cb32-aedc-48db-bc49-99c6d668ce1b-kube-api-access-hlcmn\") pod \"nova-cell0-cell-mapping-mbqt2\" (UID: \"8f56cb32-aedc-48db-bc49-99c6d668ce1b\") " pod="openstack/nova-cell0-cell-mapping-mbqt2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.522193 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/037d1001-d8b7-453e-bc9b-d4324e553154-config-data\") pod \"nova-api-0\" (UID: \"037d1001-d8b7-453e-bc9b-d4324e553154\") " pod="openstack/nova-api-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.522267 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f56cb32-aedc-48db-bc49-99c6d668ce1b-config-data\") pod \"nova-cell0-cell-mapping-mbqt2\" (UID: \"8f56cb32-aedc-48db-bc49-99c6d668ce1b\") " pod="openstack/nova-cell0-cell-mapping-mbqt2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.522341 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f56cb32-aedc-48db-bc49-99c6d668ce1b-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-mbqt2\" (UID: \"8f56cb32-aedc-48db-bc49-99c6d668ce1b\") " pod="openstack/nova-cell0-cell-mapping-mbqt2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.522375 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f56cb32-aedc-48db-bc49-99c6d668ce1b-scripts\") pod \"nova-cell0-cell-mapping-mbqt2\" (UID: \"8f56cb32-aedc-48db-bc49-99c6d668ce1b\") " pod="openstack/nova-cell0-cell-mapping-mbqt2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.534214 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f56cb32-aedc-48db-bc49-99c6d668ce1b-combined-ca-bundle\") pod 
\"nova-cell0-cell-mapping-mbqt2\" (UID: \"8f56cb32-aedc-48db-bc49-99c6d668ce1b\") " pod="openstack/nova-cell0-cell-mapping-mbqt2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.539839 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.555086 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.557305 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f56cb32-aedc-48db-bc49-99c6d668ce1b-scripts\") pod \"nova-cell0-cell-mapping-mbqt2\" (UID: \"8f56cb32-aedc-48db-bc49-99c6d668ce1b\") " pod="openstack/nova-cell0-cell-mapping-mbqt2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.557424 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.557822 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f56cb32-aedc-48db-bc49-99c6d668ce1b-config-data\") pod \"nova-cell0-cell-mapping-mbqt2\" (UID: \"8f56cb32-aedc-48db-bc49-99c6d668ce1b\") " pod="openstack/nova-cell0-cell-mapping-mbqt2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.563229 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.578764 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.579635 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlcmn\" (UniqueName: \"kubernetes.io/projected/8f56cb32-aedc-48db-bc49-99c6d668ce1b-kube-api-access-hlcmn\") pod \"nova-cell0-cell-mapping-mbqt2\" (UID: \"8f56cb32-aedc-48db-bc49-99c6d668ce1b\") " pod="openstack/nova-cell0-cell-mapping-mbqt2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.595353 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.597139 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.601900 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.619947 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.625109 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcsjz\" (UniqueName: \"kubernetes.io/projected/990a48c2-69ad-47c0-a3ce-ee9621c27074-kube-api-access-dcsjz\") pod \"nova-metadata-0\" (UID: \"990a48c2-69ad-47c0-a3ce-ee9621c27074\") " pod="openstack/nova-metadata-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.625156 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqntk\" (UniqueName: \"kubernetes.io/projected/600817fb-be86-4f40-a561-be53138f62d6-kube-api-access-nqntk\") pod \"nova-cell1-novncproxy-0\" (UID: \"600817fb-be86-4f40-a561-be53138f62d6\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.625220 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxdpv\" (UniqueName: \"kubernetes.io/projected/d40ef110-5f77-4f7c-9628-d7634f536c64-kube-api-access-mxdpv\") pod \"nova-scheduler-0\" (UID: \"d40ef110-5f77-4f7c-9628-d7634f536c64\") " pod="openstack/nova-scheduler-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.625344 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d40ef110-5f77-4f7c-9628-d7634f536c64-config-data\") pod \"nova-scheduler-0\" (UID: \"d40ef110-5f77-4f7c-9628-d7634f536c64\") " pod="openstack/nova-scheduler-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.625366 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/600817fb-be86-4f40-a561-be53138f62d6-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"600817fb-be86-4f40-a561-be53138f62d6\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.625420 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/037d1001-d8b7-453e-bc9b-d4324e553154-logs\") pod \"nova-api-0\" (UID: \"037d1001-d8b7-453e-bc9b-d4324e553154\") " pod="openstack/nova-api-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.625468 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqr58\" (UniqueName: \"kubernetes.io/projected/037d1001-d8b7-453e-bc9b-d4324e553154-kube-api-access-cqr58\") pod \"nova-api-0\" (UID: \"037d1001-d8b7-453e-bc9b-d4324e553154\") " pod="openstack/nova-api-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.625494 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/990a48c2-69ad-47c0-a3ce-ee9621c27074-config-data\") pod \"nova-metadata-0\" (UID: \"990a48c2-69ad-47c0-a3ce-ee9621c27074\") " pod="openstack/nova-metadata-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.625820 4840 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/037d1001-d8b7-453e-bc9b-d4324e553154-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"037d1001-d8b7-453e-bc9b-d4324e553154\") " pod="openstack/nova-api-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.626800 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/990a48c2-69ad-47c0-a3ce-ee9621c27074-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"990a48c2-69ad-47c0-a3ce-ee9621c27074\") " pod="openstack/nova-metadata-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.626923 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/990a48c2-69ad-47c0-a3ce-ee9621c27074-logs\") pod \"nova-metadata-0\" (UID: \"990a48c2-69ad-47c0-a3ce-ee9621c27074\") " pod="openstack/nova-metadata-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.626967 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/037d1001-d8b7-453e-bc9b-d4324e553154-config-data\") pod \"nova-api-0\" (UID: \"037d1001-d8b7-453e-bc9b-d4324e553154\") " pod="openstack/nova-api-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.627090 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d40ef110-5f77-4f7c-9628-d7634f536c64-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d40ef110-5f77-4f7c-9628-d7634f536c64\") " pod="openstack/nova-scheduler-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.627115 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/600817fb-be86-4f40-a561-be53138f62d6-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"600817fb-be86-4f40-a561-be53138f62d6\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.625895 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/037d1001-d8b7-453e-bc9b-d4324e553154-logs\") pod \"nova-api-0\" (UID: \"037d1001-d8b7-453e-bc9b-d4324e553154\") " pod="openstack/nova-api-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.631363 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/037d1001-d8b7-453e-bc9b-d4324e553154-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"037d1001-d8b7-453e-bc9b-d4324e553154\") " pod="openstack/nova-api-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.631894 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/037d1001-d8b7-453e-bc9b-d4324e553154-config-data\") pod \"nova-api-0\" (UID: \"037d1001-d8b7-453e-bc9b-d4324e553154\") " pod="openstack/nova-api-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.650217 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqr58\" (UniqueName: \"kubernetes.io/projected/037d1001-d8b7-453e-bc9b-d4324e553154-kube-api-access-cqr58\") pod \"nova-api-0\" (UID: \"037d1001-d8b7-453e-bc9b-d4324e553154\") " pod="openstack/nova-api-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.715305 4840 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-cmbv2"] Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.717275 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.719501 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.729903 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/990a48c2-69ad-47c0-a3ce-ee9621c27074-config-data\") pod \"nova-metadata-0\" (UID: \"990a48c2-69ad-47c0-a3ce-ee9621c27074\") " pod="openstack/nova-metadata-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.730574 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/990a48c2-69ad-47c0-a3ce-ee9621c27074-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"990a48c2-69ad-47c0-a3ce-ee9621c27074\") " pod="openstack/nova-metadata-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.731148 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-dns-svc\") pod \"dnsmasq-dns-865f5d856f-cmbv2\" (UID: \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\") " pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.731173 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-cmbv2\" (UID: \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\") " pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.731223 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/990a48c2-69ad-47c0-a3ce-ee9621c27074-logs\") pod \"nova-metadata-0\" (UID: \"990a48c2-69ad-47c0-a3ce-ee9621c27074\") " pod="openstack/nova-metadata-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.731284 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d40ef110-5f77-4f7c-9628-d7634f536c64-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d40ef110-5f77-4f7c-9628-d7634f536c64\") " pod="openstack/nova-scheduler-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.731303 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/600817fb-be86-4f40-a561-be53138f62d6-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"600817fb-be86-4f40-a561-be53138f62d6\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.731348 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcsjz\" (UniqueName: \"kubernetes.io/projected/990a48c2-69ad-47c0-a3ce-ee9621c27074-kube-api-access-dcsjz\") pod \"nova-metadata-0\" (UID: \"990a48c2-69ad-47c0-a3ce-ee9621c27074\") " pod="openstack/nova-metadata-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.731369 4840 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-nqntk\" (UniqueName: \"kubernetes.io/projected/600817fb-be86-4f40-a561-be53138f62d6-kube-api-access-nqntk\") pod \"nova-cell1-novncproxy-0\" (UID: \"600817fb-be86-4f40-a561-be53138f62d6\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.731394 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxdpv\" (UniqueName: \"kubernetes.io/projected/d40ef110-5f77-4f7c-9628-d7634f536c64-kube-api-access-mxdpv\") pod \"nova-scheduler-0\" (UID: \"d40ef110-5f77-4f7c-9628-d7634f536c64\") " pod="openstack/nova-scheduler-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.731409 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjfcn\" (UniqueName: \"kubernetes.io/projected/91d8d92c-10f0-453f-bb63-01bfaa5258c0-kube-api-access-vjfcn\") pod \"dnsmasq-dns-865f5d856f-cmbv2\" (UID: \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\") " pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.731454 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-cmbv2\" (UID: \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\") " pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.731471 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-config\") pod \"dnsmasq-dns-865f5d856f-cmbv2\" (UID: \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\") " pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.731514 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-cmbv2\" (UID: \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\") " pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.731566 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d40ef110-5f77-4f7c-9628-d7634f536c64-config-data\") pod \"nova-scheduler-0\" (UID: \"d40ef110-5f77-4f7c-9628-d7634f536c64\") " pod="openstack/nova-scheduler-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.731588 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/600817fb-be86-4f40-a561-be53138f62d6-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"600817fb-be86-4f40-a561-be53138f62d6\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.733366 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/990a48c2-69ad-47c0-a3ce-ee9621c27074-logs\") pod \"nova-metadata-0\" (UID: \"990a48c2-69ad-47c0-a3ce-ee9621c27074\") " pod="openstack/nova-metadata-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.733980 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/990a48c2-69ad-47c0-a3ce-ee9621c27074-config-data\") pod \"nova-metadata-0\" (UID: \"990a48c2-69ad-47c0-a3ce-ee9621c27074\") " pod="openstack/nova-metadata-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.737912 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/600817fb-be86-4f40-a561-be53138f62d6-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"600817fb-be86-4f40-a561-be53138f62d6\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.742446 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-cmbv2"] Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.746709 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d40ef110-5f77-4f7c-9628-d7634f536c64-config-data\") pod \"nova-scheduler-0\" (UID: \"d40ef110-5f77-4f7c-9628-d7634f536c64\") " pod="openstack/nova-scheduler-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.746841 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/990a48c2-69ad-47c0-a3ce-ee9621c27074-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"990a48c2-69ad-47c0-a3ce-ee9621c27074\") " pod="openstack/nova-metadata-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.749072 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/600817fb-be86-4f40-a561-be53138f62d6-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"600817fb-be86-4f40-a561-be53138f62d6\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.764727 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d40ef110-5f77-4f7c-9628-d7634f536c64-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d40ef110-5f77-4f7c-9628-d7634f536c64\") " pod="openstack/nova-scheduler-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.772212 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqntk\" (UniqueName: \"kubernetes.io/projected/600817fb-be86-4f40-a561-be53138f62d6-kube-api-access-nqntk\") pod \"nova-cell1-novncproxy-0\" (UID: \"600817fb-be86-4f40-a561-be53138f62d6\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.772614 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcsjz\" (UniqueName: \"kubernetes.io/projected/990a48c2-69ad-47c0-a3ce-ee9621c27074-kube-api-access-dcsjz\") pod \"nova-metadata-0\" (UID: \"990a48c2-69ad-47c0-a3ce-ee9621c27074\") " pod="openstack/nova-metadata-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.775279 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxdpv\" (UniqueName: \"kubernetes.io/projected/d40ef110-5f77-4f7c-9628-d7634f536c64-kube-api-access-mxdpv\") pod \"nova-scheduler-0\" (UID: \"d40ef110-5f77-4f7c-9628-d7634f536c64\") " pod="openstack/nova-scheduler-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.804306 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-mbqt2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.810583 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.818999 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.832958 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-dns-svc\") pod \"dnsmasq-dns-865f5d856f-cmbv2\" (UID: \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\") " pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.833002 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-cmbv2\" (UID: \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\") " pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.833096 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjfcn\" (UniqueName: \"kubernetes.io/projected/91d8d92c-10f0-453f-bb63-01bfaa5258c0-kube-api-access-vjfcn\") pod \"dnsmasq-dns-865f5d856f-cmbv2\" (UID: \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\") " pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.833127 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-cmbv2\" (UID: \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\") " pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.833145 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-config\") pod \"dnsmasq-dns-865f5d856f-cmbv2\" (UID: \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\") " pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.833179 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-cmbv2\" (UID: \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\") " pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.834245 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-dns-svc\") pod \"dnsmasq-dns-865f5d856f-cmbv2\" (UID: \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\") " pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.834368 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-cmbv2\" (UID: \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\") " pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 
15:20:14.834825 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-cmbv2\" (UID: \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\") " pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.835002 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-cmbv2\" (UID: \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\") " pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.835397 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-config\") pod \"dnsmasq-dns-865f5d856f-cmbv2\" (UID: \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\") " pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" Dec 05 15:20:14 crc kubenswrapper[4840]: I1205 15:20:14.852547 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjfcn\" (UniqueName: \"kubernetes.io/projected/91d8d92c-10f0-453f-bb63-01bfaa5258c0-kube-api-access-vjfcn\") pod \"dnsmasq-dns-865f5d856f-cmbv2\" (UID: \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\") " pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.022285 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.143087 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.260308 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 15:20:15 crc kubenswrapper[4840]: W1205 15:20:15.285779 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod600817fb_be86_4f40_a561_be53138f62d6.slice/crio-24961cb45e569f6f9319906ec40f9050364e02132ff89bf42dbc432479d1f232 WatchSource:0}: Error finding container 24961cb45e569f6f9319906ec40f9050364e02132ff89bf42dbc432479d1f232: Status 404 returned error can't find the container with id 24961cb45e569f6f9319906ec40f9050364e02132ff89bf42dbc432479d1f232 Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.316010 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.450273 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bmprz"] Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.454586 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-bmprz" Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.456623 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.460044 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.466051 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"600817fb-be86-4f40-a561-be53138f62d6","Type":"ContainerStarted","Data":"24961cb45e569f6f9319906ec40f9050364e02132ff89bf42dbc432479d1f232"} Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.468659 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"037d1001-d8b7-453e-bc9b-d4324e553154","Type":"ContainerStarted","Data":"67daa1b90702047a1cb73f466285c5ada23407d4ec8b87fb5068ea3d3f48e6f0"} Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.471380 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bmprz"] Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.525480 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-mbqt2"] Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.541346 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.549087 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1eb8edca-e2c3-414d-99d6-eb12987292da-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-bmprz\" (UID: \"1eb8edca-e2c3-414d-99d6-eb12987292da\") " pod="openstack/nova-cell1-conductor-db-sync-bmprz" Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.549244 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86blt\" (UniqueName: \"kubernetes.io/projected/1eb8edca-e2c3-414d-99d6-eb12987292da-kube-api-access-86blt\") pod \"nova-cell1-conductor-db-sync-bmprz\" (UID: \"1eb8edca-e2c3-414d-99d6-eb12987292da\") " pod="openstack/nova-cell1-conductor-db-sync-bmprz" Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.549302 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1eb8edca-e2c3-414d-99d6-eb12987292da-scripts\") pod \"nova-cell1-conductor-db-sync-bmprz\" (UID: \"1eb8edca-e2c3-414d-99d6-eb12987292da\") " pod="openstack/nova-cell1-conductor-db-sync-bmprz" Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.549339 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eb8edca-e2c3-414d-99d6-eb12987292da-config-data\") pod \"nova-cell1-conductor-db-sync-bmprz\" (UID: \"1eb8edca-e2c3-414d-99d6-eb12987292da\") " pod="openstack/nova-cell1-conductor-db-sync-bmprz" Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.651147 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86blt\" (UniqueName: \"kubernetes.io/projected/1eb8edca-e2c3-414d-99d6-eb12987292da-kube-api-access-86blt\") pod \"nova-cell1-conductor-db-sync-bmprz\" (UID: \"1eb8edca-e2c3-414d-99d6-eb12987292da\") 
" pod="openstack/nova-cell1-conductor-db-sync-bmprz" Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.651522 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1eb8edca-e2c3-414d-99d6-eb12987292da-scripts\") pod \"nova-cell1-conductor-db-sync-bmprz\" (UID: \"1eb8edca-e2c3-414d-99d6-eb12987292da\") " pod="openstack/nova-cell1-conductor-db-sync-bmprz" Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.651561 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eb8edca-e2c3-414d-99d6-eb12987292da-config-data\") pod \"nova-cell1-conductor-db-sync-bmprz\" (UID: \"1eb8edca-e2c3-414d-99d6-eb12987292da\") " pod="openstack/nova-cell1-conductor-db-sync-bmprz" Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.651706 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1eb8edca-e2c3-414d-99d6-eb12987292da-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-bmprz\" (UID: \"1eb8edca-e2c3-414d-99d6-eb12987292da\") " pod="openstack/nova-cell1-conductor-db-sync-bmprz" Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.658180 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1eb8edca-e2c3-414d-99d6-eb12987292da-scripts\") pod \"nova-cell1-conductor-db-sync-bmprz\" (UID: \"1eb8edca-e2c3-414d-99d6-eb12987292da\") " pod="openstack/nova-cell1-conductor-db-sync-bmprz" Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.658677 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1eb8edca-e2c3-414d-99d6-eb12987292da-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-bmprz\" (UID: \"1eb8edca-e2c3-414d-99d6-eb12987292da\") " pod="openstack/nova-cell1-conductor-db-sync-bmprz" Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.659046 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eb8edca-e2c3-414d-99d6-eb12987292da-config-data\") pod \"nova-cell1-conductor-db-sync-bmprz\" (UID: \"1eb8edca-e2c3-414d-99d6-eb12987292da\") " pod="openstack/nova-cell1-conductor-db-sync-bmprz" Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.670562 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86blt\" (UniqueName: \"kubernetes.io/projected/1eb8edca-e2c3-414d-99d6-eb12987292da-kube-api-access-86blt\") pod \"nova-cell1-conductor-db-sync-bmprz\" (UID: \"1eb8edca-e2c3-414d-99d6-eb12987292da\") " pod="openstack/nova-cell1-conductor-db-sync-bmprz" Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.710396 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 15:20:15 crc kubenswrapper[4840]: W1205 15:20:15.717063 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd40ef110_5f77_4f7c_9628_d7634f536c64.slice/crio-dabaa5cc495cfbb7574d347e40180e7159383d091c5383902eea20936a53afdd WatchSource:0}: Error finding container dabaa5cc495cfbb7574d347e40180e7159383d091c5383902eea20936a53afdd: Status 404 returned error can't find the container with id dabaa5cc495cfbb7574d347e40180e7159383d091c5383902eea20936a53afdd Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 
15:20:15.737376 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-cmbv2"] Dec 05 15:20:15 crc kubenswrapper[4840]: I1205 15:20:15.777518 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-bmprz" Dec 05 15:20:16 crc kubenswrapper[4840]: I1205 15:20:16.107743 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bmprz"] Dec 05 15:20:16 crc kubenswrapper[4840]: I1205 15:20:16.495749 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d40ef110-5f77-4f7c-9628-d7634f536c64","Type":"ContainerStarted","Data":"dabaa5cc495cfbb7574d347e40180e7159383d091c5383902eea20936a53afdd"} Dec 05 15:20:16 crc kubenswrapper[4840]: I1205 15:20:16.500736 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-bmprz" event={"ID":"1eb8edca-e2c3-414d-99d6-eb12987292da","Type":"ContainerStarted","Data":"c918174995a3cd3f297ad0fd560d80a86376530b1c9c16d462e1bc0583c2d611"} Dec 05 15:20:16 crc kubenswrapper[4840]: I1205 15:20:16.500822 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-bmprz" event={"ID":"1eb8edca-e2c3-414d-99d6-eb12987292da","Type":"ContainerStarted","Data":"322e6b575479fbd6c84bce685a79433bb613ae11e3c81ba1e3834a12e7431b43"} Dec 05 15:20:16 crc kubenswrapper[4840]: I1205 15:20:16.519522 4840 generic.go:334] "Generic (PLEG): container finished" podID="91d8d92c-10f0-453f-bb63-01bfaa5258c0" containerID="4c6c50637b763dd0ff57737c7280c9fd1f36c4c1dffcde6c317d8efdb3e19700" exitCode=0 Dec 05 15:20:16 crc kubenswrapper[4840]: I1205 15:20:16.519620 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" event={"ID":"91d8d92c-10f0-453f-bb63-01bfaa5258c0","Type":"ContainerDied","Data":"4c6c50637b763dd0ff57737c7280c9fd1f36c4c1dffcde6c317d8efdb3e19700"} Dec 05 15:20:16 crc kubenswrapper[4840]: I1205 15:20:16.519652 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" event={"ID":"91d8d92c-10f0-453f-bb63-01bfaa5258c0","Type":"ContainerStarted","Data":"49b928f6e349c97d05a1257a7e17065d7d58b21ebfceb8181558a7ce601fa4fb"} Dec 05 15:20:16 crc kubenswrapper[4840]: I1205 15:20:16.525751 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"990a48c2-69ad-47c0-a3ce-ee9621c27074","Type":"ContainerStarted","Data":"5e3d8012f64544c06c199025250a752293d56efcab5e1127a84d0e0ee537c18e"} Dec 05 15:20:16 crc kubenswrapper[4840]: I1205 15:20:16.526106 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-bmprz" podStartSLOduration=1.5260837390000002 podStartE2EDuration="1.526083739s" podCreationTimestamp="2025-12-05 15:20:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:20:16.522377285 +0000 UTC m=+1294.863439889" watchObservedRunningTime="2025-12-05 15:20:16.526083739 +0000 UTC m=+1294.867146353" Dec 05 15:20:16 crc kubenswrapper[4840]: I1205 15:20:16.532832 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-mbqt2" event={"ID":"8f56cb32-aedc-48db-bc49-99c6d668ce1b","Type":"ContainerStarted","Data":"bfe6a586db863e9a48d639405a417c4e1ca08c0214c92fe8d4150b6d3da3a689"} Dec 05 15:20:16 crc kubenswrapper[4840]: I1205 
15:20:16.533033 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-mbqt2" event={"ID":"8f56cb32-aedc-48db-bc49-99c6d668ce1b","Type":"ContainerStarted","Data":"9a286a84119eb3082739c8a4a6612ecbf9552ba86afb9881ee02a788c3108ce3"} Dec 05 15:20:16 crc kubenswrapper[4840]: I1205 15:20:16.576619 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-mbqt2" podStartSLOduration=2.5765820010000002 podStartE2EDuration="2.576582001s" podCreationTimestamp="2025-12-05 15:20:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:20:16.571965021 +0000 UTC m=+1294.913027635" watchObservedRunningTime="2025-12-05 15:20:16.576582001 +0000 UTC m=+1294.917644605" Dec 05 15:20:18 crc kubenswrapper[4840]: I1205 15:20:18.029608 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 15:20:18 crc kubenswrapper[4840]: I1205 15:20:18.041729 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 15:20:19 crc kubenswrapper[4840]: I1205 15:20:19.561851 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"600817fb-be86-4f40-a561-be53138f62d6","Type":"ContainerStarted","Data":"ee91b53247aed487aebe3a948b35a9d7b25876796749f98d6bbfeb08c69e866f"} Dec 05 15:20:19 crc kubenswrapper[4840]: I1205 15:20:19.561976 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="600817fb-be86-4f40-a561-be53138f62d6" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://ee91b53247aed487aebe3a948b35a9d7b25876796749f98d6bbfeb08c69e866f" gracePeriod=30 Dec 05 15:20:19 crc kubenswrapper[4840]: I1205 15:20:19.565731 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" event={"ID":"91d8d92c-10f0-453f-bb63-01bfaa5258c0","Type":"ContainerStarted","Data":"7208ba63c580147aa108f074e1a600ebde37182acafe66574e0fa42950377a15"} Dec 05 15:20:19 crc kubenswrapper[4840]: I1205 15:20:19.565991 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" Dec 05 15:20:19 crc kubenswrapper[4840]: I1205 15:20:19.570963 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"037d1001-d8b7-453e-bc9b-d4324e553154","Type":"ContainerStarted","Data":"36438ddad0aca0bd48c529f5d2d755a35b06a9e2cfc019aba33a8727f08b2aed"} Dec 05 15:20:19 crc kubenswrapper[4840]: I1205 15:20:19.571177 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"037d1001-d8b7-453e-bc9b-d4324e553154","Type":"ContainerStarted","Data":"4653ae4f6ccf64e84418344bbeb5ac12d4c1c8bacf808246fcf600a23d41f91d"} Dec 05 15:20:19 crc kubenswrapper[4840]: I1205 15:20:19.572961 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d40ef110-5f77-4f7c-9628-d7634f536c64","Type":"ContainerStarted","Data":"3fe053dc1e064ca864ae7231664a5f5af570637d0770106257183997f8d21ac3"} Dec 05 15:20:19 crc kubenswrapper[4840]: I1205 15:20:19.574973 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"990a48c2-69ad-47c0-a3ce-ee9621c27074","Type":"ContainerStarted","Data":"3ea6cfee348760780b72c3e2d4d17b2e20a3a26784542bef08829f45be7a9fb0"} Dec 05 15:20:19 crc 
kubenswrapper[4840]: I1205 15:20:19.575079 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"990a48c2-69ad-47c0-a3ce-ee9621c27074","Type":"ContainerStarted","Data":"626d8cbb3c16b2a74be9b59dc115bc8a9293fed9dc15136c912802c86b9abc87"} Dec 05 15:20:19 crc kubenswrapper[4840]: I1205 15:20:19.575132 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="990a48c2-69ad-47c0-a3ce-ee9621c27074" containerName="nova-metadata-log" containerID="cri-o://626d8cbb3c16b2a74be9b59dc115bc8a9293fed9dc15136c912802c86b9abc87" gracePeriod=30 Dec 05 15:20:19 crc kubenswrapper[4840]: I1205 15:20:19.575171 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="990a48c2-69ad-47c0-a3ce-ee9621c27074" containerName="nova-metadata-metadata" containerID="cri-o://3ea6cfee348760780b72c3e2d4d17b2e20a3a26784542bef08829f45be7a9fb0" gracePeriod=30 Dec 05 15:20:19 crc kubenswrapper[4840]: I1205 15:20:19.594421 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.046297473 podStartE2EDuration="5.594390285s" podCreationTimestamp="2025-12-05 15:20:14 +0000 UTC" firstStartedPulling="2025-12-05 15:20:15.290186328 +0000 UTC m=+1293.631248942" lastFinishedPulling="2025-12-05 15:20:18.83827912 +0000 UTC m=+1297.179341754" observedRunningTime="2025-12-05 15:20:19.578038715 +0000 UTC m=+1297.919101339" watchObservedRunningTime="2025-12-05 15:20:19.594390285 +0000 UTC m=+1297.935452909" Dec 05 15:20:19 crc kubenswrapper[4840]: I1205 15:20:19.601546 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" podStartSLOduration=5.601525046 podStartE2EDuration="5.601525046s" podCreationTimestamp="2025-12-05 15:20:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:20:19.596158745 +0000 UTC m=+1297.937221359" watchObservedRunningTime="2025-12-05 15:20:19.601525046 +0000 UTC m=+1297.942587660" Dec 05 15:20:19 crc kubenswrapper[4840]: I1205 15:20:19.626203 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.109658427 podStartE2EDuration="5.62618664s" podCreationTimestamp="2025-12-05 15:20:14 +0000 UTC" firstStartedPulling="2025-12-05 15:20:15.318728452 +0000 UTC m=+1293.659791066" lastFinishedPulling="2025-12-05 15:20:18.835256665 +0000 UTC m=+1297.176319279" observedRunningTime="2025-12-05 15:20:19.62083021 +0000 UTC m=+1297.961892834" watchObservedRunningTime="2025-12-05 15:20:19.62618664 +0000 UTC m=+1297.967249254" Dec 05 15:20:19 crc kubenswrapper[4840]: I1205 15:20:19.646520 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.42520241 podStartE2EDuration="5.646498262s" podCreationTimestamp="2025-12-05 15:20:14 +0000 UTC" firstStartedPulling="2025-12-05 15:20:15.616978918 +0000 UTC m=+1293.958041532" lastFinishedPulling="2025-12-05 15:20:18.83827477 +0000 UTC m=+1297.179337384" observedRunningTime="2025-12-05 15:20:19.642319995 +0000 UTC m=+1297.983382609" watchObservedRunningTime="2025-12-05 15:20:19.646498262 +0000 UTC m=+1297.987560876" Dec 05 15:20:19 crc kubenswrapper[4840]: I1205 15:20:19.811072 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:19 crc kubenswrapper[4840]: I1205 15:20:19.819233 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 15:20:19 crc kubenswrapper[4840]: I1205 15:20:19.819273 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 15:20:20 crc kubenswrapper[4840]: I1205 15:20:20.024830 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 15:20:20 crc kubenswrapper[4840]: I1205 15:20:20.588086 4840 generic.go:334] "Generic (PLEG): container finished" podID="990a48c2-69ad-47c0-a3ce-ee9621c27074" containerID="626d8cbb3c16b2a74be9b59dc115bc8a9293fed9dc15136c912802c86b9abc87" exitCode=143 Dec 05 15:20:20 crc kubenswrapper[4840]: I1205 15:20:20.588187 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"990a48c2-69ad-47c0-a3ce-ee9621c27074","Type":"ContainerDied","Data":"626d8cbb3c16b2a74be9b59dc115bc8a9293fed9dc15136c912802c86b9abc87"} Dec 05 15:20:23 crc kubenswrapper[4840]: I1205 15:20:23.618310 4840 generic.go:334] "Generic (PLEG): container finished" podID="1eb8edca-e2c3-414d-99d6-eb12987292da" containerID="c918174995a3cd3f297ad0fd560d80a86376530b1c9c16d462e1bc0583c2d611" exitCode=0 Dec 05 15:20:23 crc kubenswrapper[4840]: I1205 15:20:23.618371 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-bmprz" event={"ID":"1eb8edca-e2c3-414d-99d6-eb12987292da","Type":"ContainerDied","Data":"c918174995a3cd3f297ad0fd560d80a86376530b1c9c16d462e1bc0583c2d611"} Dec 05 15:20:23 crc kubenswrapper[4840]: I1205 15:20:23.645070 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=6.542150715 podStartE2EDuration="9.645046804s" podCreationTimestamp="2025-12-05 15:20:14 +0000 UTC" firstStartedPulling="2025-12-05 15:20:15.74350512 +0000 UTC m=+1294.084567734" lastFinishedPulling="2025-12-05 15:20:18.846401209 +0000 UTC m=+1297.187463823" observedRunningTime="2025-12-05 15:20:19.667110062 +0000 UTC m=+1298.008172676" watchObservedRunningTime="2025-12-05 15:20:23.645046804 +0000 UTC m=+1301.986109418" Dec 05 15:20:24 crc kubenswrapper[4840]: I1205 15:20:24.630559 4840 generic.go:334] "Generic (PLEG): container finished" podID="8f56cb32-aedc-48db-bc49-99c6d668ce1b" containerID="bfe6a586db863e9a48d639405a417c4e1ca08c0214c92fe8d4150b6d3da3a689" exitCode=0 Dec 05 15:20:24 crc kubenswrapper[4840]: I1205 15:20:24.630788 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-mbqt2" event={"ID":"8f56cb32-aedc-48db-bc49-99c6d668ce1b","Type":"ContainerDied","Data":"bfe6a586db863e9a48d639405a417c4e1ca08c0214c92fe8d4150b6d3da3a689"} Dec 05 15:20:24 crc kubenswrapper[4840]: I1205 15:20:24.720844 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 15:20:24 crc kubenswrapper[4840]: I1205 15:20:24.720906 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.007601 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-bmprz" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.023536 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.066713 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.145892 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.155978 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1eb8edca-e2c3-414d-99d6-eb12987292da-scripts\") pod \"1eb8edca-e2c3-414d-99d6-eb12987292da\" (UID: \"1eb8edca-e2c3-414d-99d6-eb12987292da\") " Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.156157 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1eb8edca-e2c3-414d-99d6-eb12987292da-combined-ca-bundle\") pod \"1eb8edca-e2c3-414d-99d6-eb12987292da\" (UID: \"1eb8edca-e2c3-414d-99d6-eb12987292da\") " Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.156442 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-86blt\" (UniqueName: \"kubernetes.io/projected/1eb8edca-e2c3-414d-99d6-eb12987292da-kube-api-access-86blt\") pod \"1eb8edca-e2c3-414d-99d6-eb12987292da\" (UID: \"1eb8edca-e2c3-414d-99d6-eb12987292da\") " Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.156488 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eb8edca-e2c3-414d-99d6-eb12987292da-config-data\") pod \"1eb8edca-e2c3-414d-99d6-eb12987292da\" (UID: \"1eb8edca-e2c3-414d-99d6-eb12987292da\") " Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.164180 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1eb8edca-e2c3-414d-99d6-eb12987292da-scripts" (OuterVolumeSpecName: "scripts") pod "1eb8edca-e2c3-414d-99d6-eb12987292da" (UID: "1eb8edca-e2c3-414d-99d6-eb12987292da"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.196455 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1eb8edca-e2c3-414d-99d6-eb12987292da-kube-api-access-86blt" (OuterVolumeSpecName: "kube-api-access-86blt") pod "1eb8edca-e2c3-414d-99d6-eb12987292da" (UID: "1eb8edca-e2c3-414d-99d6-eb12987292da"). InnerVolumeSpecName "kube-api-access-86blt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.204832 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1eb8edca-e2c3-414d-99d6-eb12987292da-config-data" (OuterVolumeSpecName: "config-data") pod "1eb8edca-e2c3-414d-99d6-eb12987292da" (UID: "1eb8edca-e2c3-414d-99d6-eb12987292da"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.226608 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1eb8edca-e2c3-414d-99d6-eb12987292da-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1eb8edca-e2c3-414d-99d6-eb12987292da" (UID: "1eb8edca-e2c3-414d-99d6-eb12987292da"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.239271 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-6ccth"] Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.239632 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" podUID="4be94020-016c-4262-ade5-165fe35d6a2c" containerName="dnsmasq-dns" containerID="cri-o://7721b0eefb969a6dd2ee07d134959fd2c68f46365e33634e8d163ef93d8dc7a2" gracePeriod=10 Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.265995 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1eb8edca-e2c3-414d-99d6-eb12987292da-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.266053 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1eb8edca-e2c3-414d-99d6-eb12987292da-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.266072 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-86blt\" (UniqueName: \"kubernetes.io/projected/1eb8edca-e2c3-414d-99d6-eb12987292da-kube-api-access-86blt\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.266086 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1eb8edca-e2c3-414d-99d6-eb12987292da-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.647157 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-bmprz" event={"ID":"1eb8edca-e2c3-414d-99d6-eb12987292da","Type":"ContainerDied","Data":"322e6b575479fbd6c84bce685a79433bb613ae11e3c81ba1e3834a12e7431b43"} Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.647202 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="322e6b575479fbd6c84bce685a79433bb613ae11e3c81ba1e3834a12e7431b43" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.647275 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-bmprz" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.652528 4840 generic.go:334] "Generic (PLEG): container finished" podID="4be94020-016c-4262-ade5-165fe35d6a2c" containerID="7721b0eefb969a6dd2ee07d134959fd2c68f46365e33634e8d163ef93d8dc7a2" exitCode=0 Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.652616 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" event={"ID":"4be94020-016c-4262-ade5-165fe35d6a2c","Type":"ContainerDied","Data":"7721b0eefb969a6dd2ee07d134959fd2c68f46365e33634e8d163ef93d8dc7a2"} Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.738156 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 15:20:25 crc kubenswrapper[4840]: E1205 15:20:25.738907 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1eb8edca-e2c3-414d-99d6-eb12987292da" containerName="nova-cell1-conductor-db-sync" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.738921 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="1eb8edca-e2c3-414d-99d6-eb12987292da" containerName="nova-cell1-conductor-db-sync" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.739093 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="1eb8edca-e2c3-414d-99d6-eb12987292da" containerName="nova-cell1-conductor-db-sync" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.739759 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.744038 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.750118 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.762083 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.783123 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef118f1d-aaf7-48be-b9aa-ec84d23ea999-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ef118f1d-aaf7-48be-b9aa-ec84d23ea999\") " pod="openstack/nova-cell1-conductor-0" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.783181 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thc5r\" (UniqueName: \"kubernetes.io/projected/ef118f1d-aaf7-48be-b9aa-ec84d23ea999-kube-api-access-thc5r\") pod \"nova-cell1-conductor-0\" (UID: \"ef118f1d-aaf7-48be-b9aa-ec84d23ea999\") " pod="openstack/nova-cell1-conductor-0" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.783221 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef118f1d-aaf7-48be-b9aa-ec84d23ea999-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ef118f1d-aaf7-48be-b9aa-ec84d23ea999\") " pod="openstack/nova-cell1-conductor-0" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.797284 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.804036 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="037d1001-d8b7-453e-bc9b-d4324e553154" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.187:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.804109 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="037d1001-d8b7-453e-bc9b-d4324e553154" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.187:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.884547 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2w5h\" (UniqueName: \"kubernetes.io/projected/4be94020-016c-4262-ade5-165fe35d6a2c-kube-api-access-q2w5h\") pod \"4be94020-016c-4262-ade5-165fe35d6a2c\" (UID: \"4be94020-016c-4262-ade5-165fe35d6a2c\") " Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.884617 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-ovsdbserver-sb\") pod \"4be94020-016c-4262-ade5-165fe35d6a2c\" (UID: \"4be94020-016c-4262-ade5-165fe35d6a2c\") " Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.884731 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-dns-swift-storage-0\") pod \"4be94020-016c-4262-ade5-165fe35d6a2c\" (UID: \"4be94020-016c-4262-ade5-165fe35d6a2c\") " Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.884891 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-dns-svc\") pod \"4be94020-016c-4262-ade5-165fe35d6a2c\" (UID: \"4be94020-016c-4262-ade5-165fe35d6a2c\") " Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.884998 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-config\") pod \"4be94020-016c-4262-ade5-165fe35d6a2c\" (UID: \"4be94020-016c-4262-ade5-165fe35d6a2c\") " Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.885036 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-ovsdbserver-nb\") pod \"4be94020-016c-4262-ade5-165fe35d6a2c\" (UID: \"4be94020-016c-4262-ade5-165fe35d6a2c\") " Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.885336 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef118f1d-aaf7-48be-b9aa-ec84d23ea999-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ef118f1d-aaf7-48be-b9aa-ec84d23ea999\") " pod="openstack/nova-cell1-conductor-0" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.885395 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thc5r\" (UniqueName: 
\"kubernetes.io/projected/ef118f1d-aaf7-48be-b9aa-ec84d23ea999-kube-api-access-thc5r\") pod \"nova-cell1-conductor-0\" (UID: \"ef118f1d-aaf7-48be-b9aa-ec84d23ea999\") " pod="openstack/nova-cell1-conductor-0" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.885457 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef118f1d-aaf7-48be-b9aa-ec84d23ea999-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ef118f1d-aaf7-48be-b9aa-ec84d23ea999\") " pod="openstack/nova-cell1-conductor-0" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.890997 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4be94020-016c-4262-ade5-165fe35d6a2c-kube-api-access-q2w5h" (OuterVolumeSpecName: "kube-api-access-q2w5h") pod "4be94020-016c-4262-ade5-165fe35d6a2c" (UID: "4be94020-016c-4262-ade5-165fe35d6a2c"). InnerVolumeSpecName "kube-api-access-q2w5h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.891230 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef118f1d-aaf7-48be-b9aa-ec84d23ea999-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ef118f1d-aaf7-48be-b9aa-ec84d23ea999\") " pod="openstack/nova-cell1-conductor-0" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.895535 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef118f1d-aaf7-48be-b9aa-ec84d23ea999-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ef118f1d-aaf7-48be-b9aa-ec84d23ea999\") " pod="openstack/nova-cell1-conductor-0" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.910866 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thc5r\" (UniqueName: \"kubernetes.io/projected/ef118f1d-aaf7-48be-b9aa-ec84d23ea999-kube-api-access-thc5r\") pod \"nova-cell1-conductor-0\" (UID: \"ef118f1d-aaf7-48be-b9aa-ec84d23ea999\") " pod="openstack/nova-cell1-conductor-0" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.962639 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4be94020-016c-4262-ade5-165fe35d6a2c" (UID: "4be94020-016c-4262-ade5-165fe35d6a2c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.982197 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4be94020-016c-4262-ade5-165fe35d6a2c" (UID: "4be94020-016c-4262-ade5-165fe35d6a2c"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.987258 4840 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.987288 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2w5h\" (UniqueName: \"kubernetes.io/projected/4be94020-016c-4262-ade5-165fe35d6a2c-kube-api-access-q2w5h\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:25 crc kubenswrapper[4840]: I1205 15:20:25.987300 4840 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.016134 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "4be94020-016c-4262-ade5-165fe35d6a2c" (UID: "4be94020-016c-4262-ade5-165fe35d6a2c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.026759 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-config" (OuterVolumeSpecName: "config") pod "4be94020-016c-4262-ade5-165fe35d6a2c" (UID: "4be94020-016c-4262-ade5-165fe35d6a2c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.057313 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4be94020-016c-4262-ade5-165fe35d6a2c" (UID: "4be94020-016c-4262-ade5-165fe35d6a2c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.071413 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.088728 4840 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.088761 4840 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.088770 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4be94020-016c-4262-ade5-165fe35d6a2c-config\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.193573 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-mbqt2" Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.294417 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hlcmn\" (UniqueName: \"kubernetes.io/projected/8f56cb32-aedc-48db-bc49-99c6d668ce1b-kube-api-access-hlcmn\") pod \"8f56cb32-aedc-48db-bc49-99c6d668ce1b\" (UID: \"8f56cb32-aedc-48db-bc49-99c6d668ce1b\") " Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.302745 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f56cb32-aedc-48db-bc49-99c6d668ce1b-kube-api-access-hlcmn" (OuterVolumeSpecName: "kube-api-access-hlcmn") pod "8f56cb32-aedc-48db-bc49-99c6d668ce1b" (UID: "8f56cb32-aedc-48db-bc49-99c6d668ce1b"). InnerVolumeSpecName "kube-api-access-hlcmn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.396899 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f56cb32-aedc-48db-bc49-99c6d668ce1b-scripts\") pod \"8f56cb32-aedc-48db-bc49-99c6d668ce1b\" (UID: \"8f56cb32-aedc-48db-bc49-99c6d668ce1b\") " Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.397004 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f56cb32-aedc-48db-bc49-99c6d668ce1b-combined-ca-bundle\") pod \"8f56cb32-aedc-48db-bc49-99c6d668ce1b\" (UID: \"8f56cb32-aedc-48db-bc49-99c6d668ce1b\") " Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.397129 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f56cb32-aedc-48db-bc49-99c6d668ce1b-config-data\") pod \"8f56cb32-aedc-48db-bc49-99c6d668ce1b\" (UID: \"8f56cb32-aedc-48db-bc49-99c6d668ce1b\") " Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.397507 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hlcmn\" (UniqueName: \"kubernetes.io/projected/8f56cb32-aedc-48db-bc49-99c6d668ce1b-kube-api-access-hlcmn\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.408168 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f56cb32-aedc-48db-bc49-99c6d668ce1b-scripts" (OuterVolumeSpecName: "scripts") pod "8f56cb32-aedc-48db-bc49-99c6d668ce1b" (UID: "8f56cb32-aedc-48db-bc49-99c6d668ce1b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.455220 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f56cb32-aedc-48db-bc49-99c6d668ce1b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8f56cb32-aedc-48db-bc49-99c6d668ce1b" (UID: "8f56cb32-aedc-48db-bc49-99c6d668ce1b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.462251 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f56cb32-aedc-48db-bc49-99c6d668ce1b-config-data" (OuterVolumeSpecName: "config-data") pod "8f56cb32-aedc-48db-bc49-99c6d668ce1b" (UID: "8f56cb32-aedc-48db-bc49-99c6d668ce1b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.499220 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f56cb32-aedc-48db-bc49-99c6d668ce1b-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.499251 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f56cb32-aedc-48db-bc49-99c6d668ce1b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.499260 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f56cb32-aedc-48db-bc49-99c6d668ce1b-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.599024 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 15:20:26 crc kubenswrapper[4840]: W1205 15:20:26.601898 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podef118f1d_aaf7_48be_b9aa_ec84d23ea999.slice/crio-778f40756d9b7256bf52d74d58e0e03e61a6f2b5b1ff1c86f8cca1bf7861bbac WatchSource:0}: Error finding container 778f40756d9b7256bf52d74d58e0e03e61a6f2b5b1ff1c86f8cca1bf7861bbac: Status 404 returned error can't find the container with id 778f40756d9b7256bf52d74d58e0e03e61a6f2b5b1ff1c86f8cca1bf7861bbac Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.663816 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" event={"ID":"4be94020-016c-4262-ade5-165fe35d6a2c","Type":"ContainerDied","Data":"61ecb5ae14c60ae1487f9cfd8112d482bb378dbd8fef707bea915aab6429a085"} Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.663860 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-6ccth" Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.663940 4840 scope.go:117] "RemoveContainer" containerID="7721b0eefb969a6dd2ee07d134959fd2c68f46365e33634e8d163ef93d8dc7a2" Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.676274 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-mbqt2" event={"ID":"8f56cb32-aedc-48db-bc49-99c6d668ce1b","Type":"ContainerDied","Data":"9a286a84119eb3082739c8a4a6612ecbf9552ba86afb9881ee02a788c3108ce3"} Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.676338 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9a286a84119eb3082739c8a4a6612ecbf9552ba86afb9881ee02a788c3108ce3" Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.676403 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-mbqt2" Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.681675 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ef118f1d-aaf7-48be-b9aa-ec84d23ea999","Type":"ContainerStarted","Data":"778f40756d9b7256bf52d74d58e0e03e61a6f2b5b1ff1c86f8cca1bf7861bbac"} Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.696809 4840 scope.go:117] "RemoveContainer" containerID="6abaca6d1e9694835ca66f3efac358460688024cd305583826a05c14fe064105" Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.710713 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-6ccth"] Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.720501 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-6ccth"] Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.803429 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.803686 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="037d1001-d8b7-453e-bc9b-d4324e553154" containerName="nova-api-log" containerID="cri-o://4653ae4f6ccf64e84418344bbeb5ac12d4c1c8bacf808246fcf600a23d41f91d" gracePeriod=30 Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.803764 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="037d1001-d8b7-453e-bc9b-d4324e553154" containerName="nova-api-api" containerID="cri-o://36438ddad0aca0bd48c529f5d2d755a35b06a9e2cfc019aba33a8727f08b2aed" gracePeriod=30 Dec 05 15:20:26 crc kubenswrapper[4840]: I1205 15:20:26.827692 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 15:20:27 crc kubenswrapper[4840]: I1205 15:20:27.692554 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ef118f1d-aaf7-48be-b9aa-ec84d23ea999","Type":"ContainerStarted","Data":"ebbb34d9ae99e802b1a500fca64ea44d569858f64b9a826f500a0d1290126a2c"} Dec 05 15:20:27 crc kubenswrapper[4840]: I1205 15:20:27.693020 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 05 15:20:27 crc kubenswrapper[4840]: I1205 15:20:27.694782 4840 generic.go:334] "Generic (PLEG): container finished" podID="037d1001-d8b7-453e-bc9b-d4324e553154" containerID="4653ae4f6ccf64e84418344bbeb5ac12d4c1c8bacf808246fcf600a23d41f91d" exitCode=143 Dec 05 15:20:27 crc kubenswrapper[4840]: I1205 15:20:27.694882 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"037d1001-d8b7-453e-bc9b-d4324e553154","Type":"ContainerDied","Data":"4653ae4f6ccf64e84418344bbeb5ac12d4c1c8bacf808246fcf600a23d41f91d"} Dec 05 15:20:27 crc kubenswrapper[4840]: I1205 15:20:27.701271 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="d40ef110-5f77-4f7c-9628-d7634f536c64" containerName="nova-scheduler-scheduler" containerID="cri-o://3fe053dc1e064ca864ae7231664a5f5af570637d0770106257183997f8d21ac3" gracePeriod=30 Dec 05 15:20:27 crc kubenswrapper[4840]: I1205 15:20:27.709681 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.7096574369999997 podStartE2EDuration="2.709657437s" podCreationTimestamp="2025-12-05 15:20:25 
+0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:20:27.707009383 +0000 UTC m=+1306.048072017" watchObservedRunningTime="2025-12-05 15:20:27.709657437 +0000 UTC m=+1306.050720061" Dec 05 15:20:28 crc kubenswrapper[4840]: I1205 15:20:28.078684 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4be94020-016c-4262-ade5-165fe35d6a2c" path="/var/lib/kubelet/pods/4be94020-016c-4262-ade5-165fe35d6a2c/volumes" Dec 05 15:20:30 crc kubenswrapper[4840]: E1205 15:20:30.026163 4840 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3fe053dc1e064ca864ae7231664a5f5af570637d0770106257183997f8d21ac3" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 15:20:30 crc kubenswrapper[4840]: E1205 15:20:30.027769 4840 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3fe053dc1e064ca864ae7231664a5f5af570637d0770106257183997f8d21ac3" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 15:20:30 crc kubenswrapper[4840]: E1205 15:20:30.029230 4840 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3fe053dc1e064ca864ae7231664a5f5af570637d0770106257183997f8d21ac3" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 15:20:30 crc kubenswrapper[4840]: E1205 15:20:30.029296 4840 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="d40ef110-5f77-4f7c-9628-d7634f536c64" containerName="nova-scheduler-scheduler" Dec 05 15:20:30 crc kubenswrapper[4840]: I1205 15:20:30.750421 4840 generic.go:334] "Generic (PLEG): container finished" podID="d40ef110-5f77-4f7c-9628-d7634f536c64" containerID="3fe053dc1e064ca864ae7231664a5f5af570637d0770106257183997f8d21ac3" exitCode=0 Dec 05 15:20:30 crc kubenswrapper[4840]: I1205 15:20:30.750460 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d40ef110-5f77-4f7c-9628-d7634f536c64","Type":"ContainerDied","Data":"3fe053dc1e064ca864ae7231664a5f5af570637d0770106257183997f8d21ac3"} Dec 05 15:20:30 crc kubenswrapper[4840]: I1205 15:20:30.987661 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.099228 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.132958 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d40ef110-5f77-4f7c-9628-d7634f536c64-combined-ca-bundle\") pod \"d40ef110-5f77-4f7c-9628-d7634f536c64\" (UID: \"d40ef110-5f77-4f7c-9628-d7634f536c64\") " Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.133078 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mxdpv\" (UniqueName: \"kubernetes.io/projected/d40ef110-5f77-4f7c-9628-d7634f536c64-kube-api-access-mxdpv\") pod \"d40ef110-5f77-4f7c-9628-d7634f536c64\" (UID: \"d40ef110-5f77-4f7c-9628-d7634f536c64\") " Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.133194 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d40ef110-5f77-4f7c-9628-d7634f536c64-config-data\") pod \"d40ef110-5f77-4f7c-9628-d7634f536c64\" (UID: \"d40ef110-5f77-4f7c-9628-d7634f536c64\") " Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.140472 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d40ef110-5f77-4f7c-9628-d7634f536c64-kube-api-access-mxdpv" (OuterVolumeSpecName: "kube-api-access-mxdpv") pod "d40ef110-5f77-4f7c-9628-d7634f536c64" (UID: "d40ef110-5f77-4f7c-9628-d7634f536c64"). InnerVolumeSpecName "kube-api-access-mxdpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.169559 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d40ef110-5f77-4f7c-9628-d7634f536c64-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d40ef110-5f77-4f7c-9628-d7634f536c64" (UID: "d40ef110-5f77-4f7c-9628-d7634f536c64"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.175803 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d40ef110-5f77-4f7c-9628-d7634f536c64-config-data" (OuterVolumeSpecName: "config-data") pod "d40ef110-5f77-4f7c-9628-d7634f536c64" (UID: "d40ef110-5f77-4f7c-9628-d7634f536c64"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.236033 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mxdpv\" (UniqueName: \"kubernetes.io/projected/d40ef110-5f77-4f7c-9628-d7634f536c64-kube-api-access-mxdpv\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.236075 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d40ef110-5f77-4f7c-9628-d7634f536c64-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.236099 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d40ef110-5f77-4f7c-9628-d7634f536c64-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.763212 4840 generic.go:334] "Generic (PLEG): container finished" podID="037d1001-d8b7-453e-bc9b-d4324e553154" containerID="36438ddad0aca0bd48c529f5d2d755a35b06a9e2cfc019aba33a8727f08b2aed" exitCode=0 Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.763328 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"037d1001-d8b7-453e-bc9b-d4324e553154","Type":"ContainerDied","Data":"36438ddad0aca0bd48c529f5d2d755a35b06a9e2cfc019aba33a8727f08b2aed"} Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.763392 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"037d1001-d8b7-453e-bc9b-d4324e553154","Type":"ContainerDied","Data":"67daa1b90702047a1cb73f466285c5ada23407d4ec8b87fb5068ea3d3f48e6f0"} Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.763408 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="67daa1b90702047a1cb73f466285c5ada23407d4ec8b87fb5068ea3d3f48e6f0" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.765257 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d40ef110-5f77-4f7c-9628-d7634f536c64","Type":"ContainerDied","Data":"dabaa5cc495cfbb7574d347e40180e7159383d091c5383902eea20936a53afdd"} Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.765310 4840 scope.go:117] "RemoveContainer" containerID="3fe053dc1e064ca864ae7231664a5f5af570637d0770106257183997f8d21ac3" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.765443 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.780832 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.821013 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.834632 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.853682 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 15:20:31 crc kubenswrapper[4840]: E1205 15:20:31.854192 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d40ef110-5f77-4f7c-9628-d7634f536c64" containerName="nova-scheduler-scheduler" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.854211 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="d40ef110-5f77-4f7c-9628-d7634f536c64" containerName="nova-scheduler-scheduler" Dec 05 15:20:31 crc kubenswrapper[4840]: E1205 15:20:31.854230 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4be94020-016c-4262-ade5-165fe35d6a2c" containerName="dnsmasq-dns" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.854236 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="4be94020-016c-4262-ade5-165fe35d6a2c" containerName="dnsmasq-dns" Dec 05 15:20:31 crc kubenswrapper[4840]: E1205 15:20:31.854245 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="037d1001-d8b7-453e-bc9b-d4324e553154" containerName="nova-api-log" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.854251 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="037d1001-d8b7-453e-bc9b-d4324e553154" containerName="nova-api-log" Dec 05 15:20:31 crc kubenswrapper[4840]: E1205 15:20:31.854271 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f56cb32-aedc-48db-bc49-99c6d668ce1b" containerName="nova-manage" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.854276 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f56cb32-aedc-48db-bc49-99c6d668ce1b" containerName="nova-manage" Dec 05 15:20:31 crc kubenswrapper[4840]: E1205 15:20:31.854291 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="037d1001-d8b7-453e-bc9b-d4324e553154" containerName="nova-api-api" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.854299 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="037d1001-d8b7-453e-bc9b-d4324e553154" containerName="nova-api-api" Dec 05 15:20:31 crc kubenswrapper[4840]: E1205 15:20:31.854308 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4be94020-016c-4262-ade5-165fe35d6a2c" containerName="init" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.854313 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="4be94020-016c-4262-ade5-165fe35d6a2c" containerName="init" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.854522 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="4be94020-016c-4262-ade5-165fe35d6a2c" containerName="dnsmasq-dns" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.854540 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="037d1001-d8b7-453e-bc9b-d4324e553154" containerName="nova-api-log" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.854556 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f56cb32-aedc-48db-bc49-99c6d668ce1b" containerName="nova-manage" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.854566 4840 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="037d1001-d8b7-453e-bc9b-d4324e553154" containerName="nova-api-api" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.854580 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="d40ef110-5f77-4f7c-9628-d7634f536c64" containerName="nova-scheduler-scheduler" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.855320 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.858299 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.872702 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.976373 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqr58\" (UniqueName: \"kubernetes.io/projected/037d1001-d8b7-453e-bc9b-d4324e553154-kube-api-access-cqr58\") pod \"037d1001-d8b7-453e-bc9b-d4324e553154\" (UID: \"037d1001-d8b7-453e-bc9b-d4324e553154\") " Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.976643 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/037d1001-d8b7-453e-bc9b-d4324e553154-combined-ca-bundle\") pod \"037d1001-d8b7-453e-bc9b-d4324e553154\" (UID: \"037d1001-d8b7-453e-bc9b-d4324e553154\") " Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.976791 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/037d1001-d8b7-453e-bc9b-d4324e553154-config-data\") pod \"037d1001-d8b7-453e-bc9b-d4324e553154\" (UID: \"037d1001-d8b7-453e-bc9b-d4324e553154\") " Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.976970 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/037d1001-d8b7-453e-bc9b-d4324e553154-logs\") pod \"037d1001-d8b7-453e-bc9b-d4324e553154\" (UID: \"037d1001-d8b7-453e-bc9b-d4324e553154\") " Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.977360 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d7097e8-da3f-444a-9e06-20960a21b38b-config-data\") pod \"nova-scheduler-0\" (UID: \"1d7097e8-da3f-444a-9e06-20960a21b38b\") " pod="openstack/nova-scheduler-0" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.977496 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d7097e8-da3f-444a-9e06-20960a21b38b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1d7097e8-da3f-444a-9e06-20960a21b38b\") " pod="openstack/nova-scheduler-0" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.977419 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/037d1001-d8b7-453e-bc9b-d4324e553154-logs" (OuterVolumeSpecName: "logs") pod "037d1001-d8b7-453e-bc9b-d4324e553154" (UID: "037d1001-d8b7-453e-bc9b-d4324e553154"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.977681 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htt4n\" (UniqueName: \"kubernetes.io/projected/1d7097e8-da3f-444a-9e06-20960a21b38b-kube-api-access-htt4n\") pod \"nova-scheduler-0\" (UID: \"1d7097e8-da3f-444a-9e06-20960a21b38b\") " pod="openstack/nova-scheduler-0" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.977811 4840 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/037d1001-d8b7-453e-bc9b-d4324e553154-logs\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:31 crc kubenswrapper[4840]: I1205 15:20:31.994163 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/037d1001-d8b7-453e-bc9b-d4324e553154-kube-api-access-cqr58" (OuterVolumeSpecName: "kube-api-access-cqr58") pod "037d1001-d8b7-453e-bc9b-d4324e553154" (UID: "037d1001-d8b7-453e-bc9b-d4324e553154"). InnerVolumeSpecName "kube-api-access-cqr58". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.005358 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/037d1001-d8b7-453e-bc9b-d4324e553154-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "037d1001-d8b7-453e-bc9b-d4324e553154" (UID: "037d1001-d8b7-453e-bc9b-d4324e553154"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.008528 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/037d1001-d8b7-453e-bc9b-d4324e553154-config-data" (OuterVolumeSpecName: "config-data") pod "037d1001-d8b7-453e-bc9b-d4324e553154" (UID: "037d1001-d8b7-453e-bc9b-d4324e553154"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.084158 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d7097e8-da3f-444a-9e06-20960a21b38b-config-data\") pod \"nova-scheduler-0\" (UID: \"1d7097e8-da3f-444a-9e06-20960a21b38b\") " pod="openstack/nova-scheduler-0" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.084215 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d7097e8-da3f-444a-9e06-20960a21b38b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1d7097e8-da3f-444a-9e06-20960a21b38b\") " pod="openstack/nova-scheduler-0" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.084308 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htt4n\" (UniqueName: \"kubernetes.io/projected/1d7097e8-da3f-444a-9e06-20960a21b38b-kube-api-access-htt4n\") pod \"nova-scheduler-0\" (UID: \"1d7097e8-da3f-444a-9e06-20960a21b38b\") " pod="openstack/nova-scheduler-0" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.084362 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqr58\" (UniqueName: \"kubernetes.io/projected/037d1001-d8b7-453e-bc9b-d4324e553154-kube-api-access-cqr58\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.084376 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/037d1001-d8b7-453e-bc9b-d4324e553154-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.084388 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/037d1001-d8b7-453e-bc9b-d4324e553154-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.088180 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d7097e8-da3f-444a-9e06-20960a21b38b-config-data\") pod \"nova-scheduler-0\" (UID: \"1d7097e8-da3f-444a-9e06-20960a21b38b\") " pod="openstack/nova-scheduler-0" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.089511 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d40ef110-5f77-4f7c-9628-d7634f536c64" path="/var/lib/kubelet/pods/d40ef110-5f77-4f7c-9628-d7634f536c64/volumes" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.092126 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d7097e8-da3f-444a-9e06-20960a21b38b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1d7097e8-da3f-444a-9e06-20960a21b38b\") " pod="openstack/nova-scheduler-0" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.101149 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htt4n\" (UniqueName: \"kubernetes.io/projected/1d7097e8-da3f-444a-9e06-20960a21b38b-kube-api-access-htt4n\") pod \"nova-scheduler-0\" (UID: \"1d7097e8-da3f-444a-9e06-20960a21b38b\") " pod="openstack/nova-scheduler-0" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.179215 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.648626 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 15:20:32 crc kubenswrapper[4840]: W1205 15:20:32.649291 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d7097e8_da3f_444a_9e06_20960a21b38b.slice/crio-1b643f3d814045692a79b0011029bba70e606dbc1a120c16eeceeac8760d40cc WatchSource:0}: Error finding container 1b643f3d814045692a79b0011029bba70e606dbc1a120c16eeceeac8760d40cc: Status 404 returned error can't find the container with id 1b643f3d814045692a79b0011029bba70e606dbc1a120c16eeceeac8760d40cc Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.779666 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1d7097e8-da3f-444a-9e06-20960a21b38b","Type":"ContainerStarted","Data":"1b643f3d814045692a79b0011029bba70e606dbc1a120c16eeceeac8760d40cc"} Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.781287 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.820244 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.840418 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.853147 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.854766 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.856633 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.865377 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.880935 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcfed2c6-10b9-4745-b2c3-b461eab145bb-logs\") pod \"nova-api-0\" (UID: \"fcfed2c6-10b9-4745-b2c3-b461eab145bb\") " pod="openstack/nova-api-0" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.880976 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcfed2c6-10b9-4745-b2c3-b461eab145bb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fcfed2c6-10b9-4745-b2c3-b461eab145bb\") " pod="openstack/nova-api-0" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.881005 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcfed2c6-10b9-4745-b2c3-b461eab145bb-config-data\") pod \"nova-api-0\" (UID: \"fcfed2c6-10b9-4745-b2c3-b461eab145bb\") " pod="openstack/nova-api-0" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.881110 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l64kx\" (UniqueName: \"kubernetes.io/projected/fcfed2c6-10b9-4745-b2c3-b461eab145bb-kube-api-access-l64kx\") pod \"nova-api-0\" (UID: 
\"fcfed2c6-10b9-4745-b2c3-b461eab145bb\") " pod="openstack/nova-api-0" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.983303 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcfed2c6-10b9-4745-b2c3-b461eab145bb-logs\") pod \"nova-api-0\" (UID: \"fcfed2c6-10b9-4745-b2c3-b461eab145bb\") " pod="openstack/nova-api-0" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.983369 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcfed2c6-10b9-4745-b2c3-b461eab145bb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fcfed2c6-10b9-4745-b2c3-b461eab145bb\") " pod="openstack/nova-api-0" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.983403 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcfed2c6-10b9-4745-b2c3-b461eab145bb-config-data\") pod \"nova-api-0\" (UID: \"fcfed2c6-10b9-4745-b2c3-b461eab145bb\") " pod="openstack/nova-api-0" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.983547 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l64kx\" (UniqueName: \"kubernetes.io/projected/fcfed2c6-10b9-4745-b2c3-b461eab145bb-kube-api-access-l64kx\") pod \"nova-api-0\" (UID: \"fcfed2c6-10b9-4745-b2c3-b461eab145bb\") " pod="openstack/nova-api-0" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.984331 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcfed2c6-10b9-4745-b2c3-b461eab145bb-logs\") pod \"nova-api-0\" (UID: \"fcfed2c6-10b9-4745-b2c3-b461eab145bb\") " pod="openstack/nova-api-0" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.988184 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcfed2c6-10b9-4745-b2c3-b461eab145bb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fcfed2c6-10b9-4745-b2c3-b461eab145bb\") " pod="openstack/nova-api-0" Dec 05 15:20:32 crc kubenswrapper[4840]: I1205 15:20:32.988412 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcfed2c6-10b9-4745-b2c3-b461eab145bb-config-data\") pod \"nova-api-0\" (UID: \"fcfed2c6-10b9-4745-b2c3-b461eab145bb\") " pod="openstack/nova-api-0" Dec 05 15:20:33 crc kubenswrapper[4840]: I1205 15:20:33.011843 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l64kx\" (UniqueName: \"kubernetes.io/projected/fcfed2c6-10b9-4745-b2c3-b461eab145bb-kube-api-access-l64kx\") pod \"nova-api-0\" (UID: \"fcfed2c6-10b9-4745-b2c3-b461eab145bb\") " pod="openstack/nova-api-0" Dec 05 15:20:33 crc kubenswrapper[4840]: I1205 15:20:33.169966 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 15:20:33 crc kubenswrapper[4840]: I1205 15:20:33.604520 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 15:20:33 crc kubenswrapper[4840]: I1205 15:20:33.828770 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fcfed2c6-10b9-4745-b2c3-b461eab145bb","Type":"ContainerStarted","Data":"49f9c853b360eb9cce93959b1ce7dfae531a7f93e58cd2ae3f7d25cb9de51000"} Dec 05 15:20:33 crc kubenswrapper[4840]: I1205 15:20:33.831266 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1d7097e8-da3f-444a-9e06-20960a21b38b","Type":"ContainerStarted","Data":"23be1e3b3ccd92013a0220e808bb85dc192b3ff3b1124f716015b1dd1115d87f"} Dec 05 15:20:33 crc kubenswrapper[4840]: I1205 15:20:33.845212 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 15:20:33 crc kubenswrapper[4840]: I1205 15:20:33.846940 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.846925947 podStartE2EDuration="2.846925947s" podCreationTimestamp="2025-12-05 15:20:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:20:33.845791635 +0000 UTC m=+1312.186854249" watchObservedRunningTime="2025-12-05 15:20:33.846925947 +0000 UTC m=+1312.187988561" Dec 05 15:20:34 crc kubenswrapper[4840]: I1205 15:20:34.097772 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="037d1001-d8b7-453e-bc9b-d4324e553154" path="/var/lib/kubelet/pods/037d1001-d8b7-453e-bc9b-d4324e553154/volumes" Dec 05 15:20:34 crc kubenswrapper[4840]: I1205 15:20:34.968421 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fcfed2c6-10b9-4745-b2c3-b461eab145bb","Type":"ContainerStarted","Data":"5ca616b17146e61f90b43d9b96f90f78ea5ec8c548089e014cee78c20a5810e0"} Dec 05 15:20:34 crc kubenswrapper[4840]: I1205 15:20:34.970367 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fcfed2c6-10b9-4745-b2c3-b461eab145bb","Type":"ContainerStarted","Data":"11c70fe994411d828878bb665b2110adcd11f5ace3384cbfb4dbf438407c1e56"} Dec 05 15:20:34 crc kubenswrapper[4840]: I1205 15:20:34.989886 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.989853791 podStartE2EDuration="2.989853791s" podCreationTimestamp="2025-12-05 15:20:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:20:34.987031361 +0000 UTC m=+1313.328093975" watchObservedRunningTime="2025-12-05 15:20:34.989853791 +0000 UTC m=+1313.330916405" Dec 05 15:20:37 crc kubenswrapper[4840]: I1205 15:20:37.179717 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 15:20:37 crc kubenswrapper[4840]: I1205 15:20:37.915785 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 15:20:37 crc kubenswrapper[4840]: I1205 15:20:37.916039 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="2d91fd7f-f657-458d-9f80-3d915e5fa97d" containerName="kube-state-metrics" 
containerID="cri-o://262d6552c5cf1ccafbf2cfc4d4e36143bdd78653f40b2f09e584a26871ad3c96" gracePeriod=30 Dec 05 15:20:38 crc kubenswrapper[4840]: I1205 15:20:38.454024 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 15:20:38 crc kubenswrapper[4840]: I1205 15:20:38.530188 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8wwt\" (UniqueName: \"kubernetes.io/projected/2d91fd7f-f657-458d-9f80-3d915e5fa97d-kube-api-access-n8wwt\") pod \"2d91fd7f-f657-458d-9f80-3d915e5fa97d\" (UID: \"2d91fd7f-f657-458d-9f80-3d915e5fa97d\") " Dec 05 15:20:38 crc kubenswrapper[4840]: I1205 15:20:38.540331 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d91fd7f-f657-458d-9f80-3d915e5fa97d-kube-api-access-n8wwt" (OuterVolumeSpecName: "kube-api-access-n8wwt") pod "2d91fd7f-f657-458d-9f80-3d915e5fa97d" (UID: "2d91fd7f-f657-458d-9f80-3d915e5fa97d"). InnerVolumeSpecName "kube-api-access-n8wwt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:20:38 crc kubenswrapper[4840]: I1205 15:20:38.635223 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8wwt\" (UniqueName: \"kubernetes.io/projected/2d91fd7f-f657-458d-9f80-3d915e5fa97d-kube-api-access-n8wwt\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:38.999746 4840 generic.go:334] "Generic (PLEG): container finished" podID="2d91fd7f-f657-458d-9f80-3d915e5fa97d" containerID="262d6552c5cf1ccafbf2cfc4d4e36143bdd78653f40b2f09e584a26871ad3c96" exitCode=2 Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:38.999811 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"2d91fd7f-f657-458d-9f80-3d915e5fa97d","Type":"ContainerDied","Data":"262d6552c5cf1ccafbf2cfc4d4e36143bdd78653f40b2f09e584a26871ad3c96"} Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:38.999850 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"2d91fd7f-f657-458d-9f80-3d915e5fa97d","Type":"ContainerDied","Data":"55a87ef9ec0a0b8720b5a9e975dd339d1fdb2a0e551083319bd22caafdf1cf1f"} Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:38.999906 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:38.999933 4840 scope.go:117] "RemoveContainer" containerID="262d6552c5cf1ccafbf2cfc4d4e36143bdd78653f40b2f09e584a26871ad3c96" Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.083936 4840 scope.go:117] "RemoveContainer" containerID="262d6552c5cf1ccafbf2cfc4d4e36143bdd78653f40b2f09e584a26871ad3c96" Dec 05 15:20:39 crc kubenswrapper[4840]: E1205 15:20:39.095033 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"262d6552c5cf1ccafbf2cfc4d4e36143bdd78653f40b2f09e584a26871ad3c96\": container with ID starting with 262d6552c5cf1ccafbf2cfc4d4e36143bdd78653f40b2f09e584a26871ad3c96 not found: ID does not exist" containerID="262d6552c5cf1ccafbf2cfc4d4e36143bdd78653f40b2f09e584a26871ad3c96" Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.095089 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"262d6552c5cf1ccafbf2cfc4d4e36143bdd78653f40b2f09e584a26871ad3c96"} err="failed to get container status \"262d6552c5cf1ccafbf2cfc4d4e36143bdd78653f40b2f09e584a26871ad3c96\": rpc error: code = NotFound desc = could not find container \"262d6552c5cf1ccafbf2cfc4d4e36143bdd78653f40b2f09e584a26871ad3c96\": container with ID starting with 262d6552c5cf1ccafbf2cfc4d4e36143bdd78653f40b2f09e584a26871ad3c96 not found: ID does not exist" Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.135983 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.155537 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.177927 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 15:20:39 crc kubenswrapper[4840]: E1205 15:20:39.178468 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d91fd7f-f657-458d-9f80-3d915e5fa97d" containerName="kube-state-metrics" Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.178483 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d91fd7f-f657-458d-9f80-3d915e5fa97d" containerName="kube-state-metrics" Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.178733 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d91fd7f-f657-458d-9f80-3d915e5fa97d" containerName="kube-state-metrics" Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.179585 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.181951 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.182256 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.185301 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.355721 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45xb6\" (UniqueName: \"kubernetes.io/projected/b4cc279e-ffd3-45a6-94cb-e787194bb137-kube-api-access-45xb6\") pod \"kube-state-metrics-0\" (UID: \"b4cc279e-ffd3-45a6-94cb-e787194bb137\") " pod="openstack/kube-state-metrics-0" Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.355853 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4cc279e-ffd3-45a6-94cb-e787194bb137-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"b4cc279e-ffd3-45a6-94cb-e787194bb137\") " pod="openstack/kube-state-metrics-0" Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.355911 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/b4cc279e-ffd3-45a6-94cb-e787194bb137-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"b4cc279e-ffd3-45a6-94cb-e787194bb137\") " pod="openstack/kube-state-metrics-0" Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.356029 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4cc279e-ffd3-45a6-94cb-e787194bb137-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"b4cc279e-ffd3-45a6-94cb-e787194bb137\") " pod="openstack/kube-state-metrics-0" Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.457481 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4cc279e-ffd3-45a6-94cb-e787194bb137-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"b4cc279e-ffd3-45a6-94cb-e787194bb137\") " pod="openstack/kube-state-metrics-0" Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.457536 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45xb6\" (UniqueName: \"kubernetes.io/projected/b4cc279e-ffd3-45a6-94cb-e787194bb137-kube-api-access-45xb6\") pod \"kube-state-metrics-0\" (UID: \"b4cc279e-ffd3-45a6-94cb-e787194bb137\") " pod="openstack/kube-state-metrics-0" Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.457623 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4cc279e-ffd3-45a6-94cb-e787194bb137-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"b4cc279e-ffd3-45a6-94cb-e787194bb137\") " pod="openstack/kube-state-metrics-0" Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.457673 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" 
(UniqueName: \"kubernetes.io/secret/b4cc279e-ffd3-45a6-94cb-e787194bb137-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"b4cc279e-ffd3-45a6-94cb-e787194bb137\") " pod="openstack/kube-state-metrics-0" Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.462505 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4cc279e-ffd3-45a6-94cb-e787194bb137-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"b4cc279e-ffd3-45a6-94cb-e787194bb137\") " pod="openstack/kube-state-metrics-0" Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.462501 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4cc279e-ffd3-45a6-94cb-e787194bb137-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"b4cc279e-ffd3-45a6-94cb-e787194bb137\") " pod="openstack/kube-state-metrics-0" Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.465460 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/b4cc279e-ffd3-45a6-94cb-e787194bb137-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"b4cc279e-ffd3-45a6-94cb-e787194bb137\") " pod="openstack/kube-state-metrics-0" Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.476200 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45xb6\" (UniqueName: \"kubernetes.io/projected/b4cc279e-ffd3-45a6-94cb-e787194bb137-kube-api-access-45xb6\") pod \"kube-state-metrics-0\" (UID: \"b4cc279e-ffd3-45a6-94cb-e787194bb137\") " pod="openstack/kube-state-metrics-0" Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.499308 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.855312 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.855961 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b99404d9-830e-4e41-ac6d-74f02a5bc179" containerName="ceilometer-central-agent" containerID="cri-o://17981c646652986a61322d0aab2c744ffd0c1e586fd7d31e1b8e2276b9997af9" gracePeriod=30 Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.856493 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b99404d9-830e-4e41-ac6d-74f02a5bc179" containerName="proxy-httpd" containerID="cri-o://4dee0cacc631e502121c83562826b14812b75ce060925f64005d0bbe5d8b94e3" gracePeriod=30 Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.856569 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b99404d9-830e-4e41-ac6d-74f02a5bc179" containerName="sg-core" containerID="cri-o://e884e3abfa3cfbfb4e8734abec44b9b560571159e4fc9edec8140d2e69cb1496" gracePeriod=30 Dec 05 15:20:39 crc kubenswrapper[4840]: I1205 15:20:39.856618 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b99404d9-830e-4e41-ac6d-74f02a5bc179" containerName="ceilometer-notification-agent" containerID="cri-o://bfd157ab1d04c540a4b5b08232ceaf982a8bc8589ba3dc6d1428a53e63816b07" gracePeriod=30 Dec 05 15:20:40 crc kubenswrapper[4840]: I1205 15:20:40.013717 4840 generic.go:334] "Generic (PLEG): container finished" podID="b99404d9-830e-4e41-ac6d-74f02a5bc179" containerID="4dee0cacc631e502121c83562826b14812b75ce060925f64005d0bbe5d8b94e3" exitCode=0 Dec 05 15:20:40 crc kubenswrapper[4840]: I1205 15:20:40.013749 4840 generic.go:334] "Generic (PLEG): container finished" podID="b99404d9-830e-4e41-ac6d-74f02a5bc179" containerID="e884e3abfa3cfbfb4e8734abec44b9b560571159e4fc9edec8140d2e69cb1496" exitCode=2 Dec 05 15:20:40 crc kubenswrapper[4840]: I1205 15:20:40.013767 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b99404d9-830e-4e41-ac6d-74f02a5bc179","Type":"ContainerDied","Data":"4dee0cacc631e502121c83562826b14812b75ce060925f64005d0bbe5d8b94e3"} Dec 05 15:20:40 crc kubenswrapper[4840]: I1205 15:20:40.013788 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b99404d9-830e-4e41-ac6d-74f02a5bc179","Type":"ContainerDied","Data":"e884e3abfa3cfbfb4e8734abec44b9b560571159e4fc9edec8140d2e69cb1496"} Dec 05 15:20:40 crc kubenswrapper[4840]: I1205 15:20:40.077248 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d91fd7f-f657-458d-9f80-3d915e5fa97d" path="/var/lib/kubelet/pods/2d91fd7f-f657-458d-9f80-3d915e5fa97d/volumes" Dec 05 15:20:40 crc kubenswrapper[4840]: W1205 15:20:40.105107 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb4cc279e_ffd3_45a6_94cb_e787194bb137.slice/crio-e435984915a38d7bb4b4d72198acc2c018c5212b4209332f53404db37130c0c6 WatchSource:0}: Error finding container e435984915a38d7bb4b4d72198acc2c018c5212b4209332f53404db37130c0c6: Status 404 returned error can't find the container with id e435984915a38d7bb4b4d72198acc2c018c5212b4209332f53404db37130c0c6 Dec 05 15:20:40 crc kubenswrapper[4840]: I1205 
15:20:40.110911 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 15:20:41 crc kubenswrapper[4840]: I1205 15:20:41.025538 4840 generic.go:334] "Generic (PLEG): container finished" podID="b99404d9-830e-4e41-ac6d-74f02a5bc179" containerID="17981c646652986a61322d0aab2c744ffd0c1e586fd7d31e1b8e2276b9997af9" exitCode=0 Dec 05 15:20:41 crc kubenswrapper[4840]: I1205 15:20:41.025614 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b99404d9-830e-4e41-ac6d-74f02a5bc179","Type":"ContainerDied","Data":"17981c646652986a61322d0aab2c744ffd0c1e586fd7d31e1b8e2276b9997af9"} Dec 05 15:20:41 crc kubenswrapper[4840]: I1205 15:20:41.027576 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b4cc279e-ffd3-45a6-94cb-e787194bb137","Type":"ContainerStarted","Data":"6ffa2a2abaf16aeafe03da9eb45d845d9a918a0d422e79d1785fa2701713657d"} Dec 05 15:20:41 crc kubenswrapper[4840]: I1205 15:20:41.027610 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b4cc279e-ffd3-45a6-94cb-e787194bb137","Type":"ContainerStarted","Data":"e435984915a38d7bb4b4d72198acc2c018c5212b4209332f53404db37130c0c6"} Dec 05 15:20:41 crc kubenswrapper[4840]: I1205 15:20:41.027739 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 05 15:20:41 crc kubenswrapper[4840]: I1205 15:20:41.046754 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.669308763 podStartE2EDuration="2.046733678s" podCreationTimestamp="2025-12-05 15:20:39 +0000 UTC" firstStartedPulling="2025-12-05 15:20:40.10762204 +0000 UTC m=+1318.448684664" lastFinishedPulling="2025-12-05 15:20:40.485046955 +0000 UTC m=+1318.826109579" observedRunningTime="2025-12-05 15:20:41.042968492 +0000 UTC m=+1319.384031116" watchObservedRunningTime="2025-12-05 15:20:41.046733678 +0000 UTC m=+1319.387796292" Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.039083 4840 generic.go:334] "Generic (PLEG): container finished" podID="b99404d9-830e-4e41-ac6d-74f02a5bc179" containerID="bfd157ab1d04c540a4b5b08232ceaf982a8bc8589ba3dc6d1428a53e63816b07" exitCode=0 Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.039614 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b99404d9-830e-4e41-ac6d-74f02a5bc179","Type":"ContainerDied","Data":"bfd157ab1d04c540a4b5b08232ceaf982a8bc8589ba3dc6d1428a53e63816b07"} Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.179914 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.216579 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.245577 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.416106 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b99404d9-830e-4e41-ac6d-74f02a5bc179-scripts\") pod \"b99404d9-830e-4e41-ac6d-74f02a5bc179\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.416305 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b99404d9-830e-4e41-ac6d-74f02a5bc179-combined-ca-bundle\") pod \"b99404d9-830e-4e41-ac6d-74f02a5bc179\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.416343 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b99404d9-830e-4e41-ac6d-74f02a5bc179-run-httpd\") pod \"b99404d9-830e-4e41-ac6d-74f02a5bc179\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.416447 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b99404d9-830e-4e41-ac6d-74f02a5bc179-log-httpd\") pod \"b99404d9-830e-4e41-ac6d-74f02a5bc179\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.416480 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b99404d9-830e-4e41-ac6d-74f02a5bc179-config-data\") pod \"b99404d9-830e-4e41-ac6d-74f02a5bc179\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.416516 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b99404d9-830e-4e41-ac6d-74f02a5bc179-sg-core-conf-yaml\") pod \"b99404d9-830e-4e41-ac6d-74f02a5bc179\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.416542 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2cxdq\" (UniqueName: \"kubernetes.io/projected/b99404d9-830e-4e41-ac6d-74f02a5bc179-kube-api-access-2cxdq\") pod \"b99404d9-830e-4e41-ac6d-74f02a5bc179\" (UID: \"b99404d9-830e-4e41-ac6d-74f02a5bc179\") " Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.416730 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b99404d9-830e-4e41-ac6d-74f02a5bc179-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b99404d9-830e-4e41-ac6d-74f02a5bc179" (UID: "b99404d9-830e-4e41-ac6d-74f02a5bc179"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.417037 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b99404d9-830e-4e41-ac6d-74f02a5bc179-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b99404d9-830e-4e41-ac6d-74f02a5bc179" (UID: "b99404d9-830e-4e41-ac6d-74f02a5bc179"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.417289 4840 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b99404d9-830e-4e41-ac6d-74f02a5bc179-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.417316 4840 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b99404d9-830e-4e41-ac6d-74f02a5bc179-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.423063 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b99404d9-830e-4e41-ac6d-74f02a5bc179-scripts" (OuterVolumeSpecName: "scripts") pod "b99404d9-830e-4e41-ac6d-74f02a5bc179" (UID: "b99404d9-830e-4e41-ac6d-74f02a5bc179"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.424127 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b99404d9-830e-4e41-ac6d-74f02a5bc179-kube-api-access-2cxdq" (OuterVolumeSpecName: "kube-api-access-2cxdq") pod "b99404d9-830e-4e41-ac6d-74f02a5bc179" (UID: "b99404d9-830e-4e41-ac6d-74f02a5bc179"). InnerVolumeSpecName "kube-api-access-2cxdq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.457125 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b99404d9-830e-4e41-ac6d-74f02a5bc179-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b99404d9-830e-4e41-ac6d-74f02a5bc179" (UID: "b99404d9-830e-4e41-ac6d-74f02a5bc179"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.518176 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b99404d9-830e-4e41-ac6d-74f02a5bc179-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b99404d9-830e-4e41-ac6d-74f02a5bc179" (UID: "b99404d9-830e-4e41-ac6d-74f02a5bc179"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.520027 4840 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b99404d9-830e-4e41-ac6d-74f02a5bc179-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.520126 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2cxdq\" (UniqueName: \"kubernetes.io/projected/b99404d9-830e-4e41-ac6d-74f02a5bc179-kube-api-access-2cxdq\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.520204 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b99404d9-830e-4e41-ac6d-74f02a5bc179-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.520377 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b99404d9-830e-4e41-ac6d-74f02a5bc179-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.529060 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b99404d9-830e-4e41-ac6d-74f02a5bc179-config-data" (OuterVolumeSpecName: "config-data") pod "b99404d9-830e-4e41-ac6d-74f02a5bc179" (UID: "b99404d9-830e-4e41-ac6d-74f02a5bc179"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:42 crc kubenswrapper[4840]: I1205 15:20:42.622180 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b99404d9-830e-4e41-ac6d-74f02a5bc179-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.051774 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b99404d9-830e-4e41-ac6d-74f02a5bc179","Type":"ContainerDied","Data":"3552eb6a4ea4154f720d8ce3b98a20ef5456989f69b6de3c78518b57db2aaf2e"} Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.051814 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.051879 4840 scope.go:117] "RemoveContainer" containerID="4dee0cacc631e502121c83562826b14812b75ce060925f64005d0bbe5d8b94e3" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.083905 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.099734 4840 scope.go:117] "RemoveContainer" containerID="e884e3abfa3cfbfb4e8734abec44b9b560571159e4fc9edec8140d2e69cb1496" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.100912 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.121078 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.131676 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:20:43 crc kubenswrapper[4840]: E1205 15:20:43.132248 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b99404d9-830e-4e41-ac6d-74f02a5bc179" containerName="ceilometer-notification-agent" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.132268 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="b99404d9-830e-4e41-ac6d-74f02a5bc179" containerName="ceilometer-notification-agent" Dec 05 15:20:43 crc kubenswrapper[4840]: E1205 15:20:43.132284 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b99404d9-830e-4e41-ac6d-74f02a5bc179" containerName="proxy-httpd" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.132290 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="b99404d9-830e-4e41-ac6d-74f02a5bc179" containerName="proxy-httpd" Dec 05 15:20:43 crc kubenswrapper[4840]: E1205 15:20:43.132327 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b99404d9-830e-4e41-ac6d-74f02a5bc179" containerName="sg-core" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.132332 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="b99404d9-830e-4e41-ac6d-74f02a5bc179" containerName="sg-core" Dec 05 15:20:43 crc kubenswrapper[4840]: E1205 15:20:43.132343 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b99404d9-830e-4e41-ac6d-74f02a5bc179" containerName="ceilometer-central-agent" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.132350 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="b99404d9-830e-4e41-ac6d-74f02a5bc179" containerName="ceilometer-central-agent" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.132577 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="b99404d9-830e-4e41-ac6d-74f02a5bc179" containerName="ceilometer-central-agent" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.132605 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="b99404d9-830e-4e41-ac6d-74f02a5bc179" containerName="sg-core" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.132627 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="b99404d9-830e-4e41-ac6d-74f02a5bc179" containerName="proxy-httpd" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.132655 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="b99404d9-830e-4e41-ac6d-74f02a5bc179" containerName="ceilometer-notification-agent" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.135517 4840 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.138179 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.138355 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.138474 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.161014 4840 scope.go:117] "RemoveContainer" containerID="bfd157ab1d04c540a4b5b08232ceaf982a8bc8589ba3dc6d1428a53e63816b07" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.175124 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.175168 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.176461 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.195312 4840 scope.go:117] "RemoveContainer" containerID="17981c646652986a61322d0aab2c744ffd0c1e586fd7d31e1b8e2276b9997af9" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.333931 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-config-data\") pod \"ceilometer-0\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.334334 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.334428 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-scripts\") pod \"ceilometer-0\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.334552 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b57de619-6aaf-4eec-aae7-ef5406b956e2-log-httpd\") pod \"ceilometer-0\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.335295 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqwqg\" (UniqueName: \"kubernetes.io/projected/b57de619-6aaf-4eec-aae7-ef5406b956e2-kube-api-access-nqwqg\") pod \"ceilometer-0\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.335507 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b57de619-6aaf-4eec-aae7-ef5406b956e2-run-httpd\") pod 
\"ceilometer-0\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.335572 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.335608 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.437606 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-scripts\") pod \"ceilometer-0\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.437671 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b57de619-6aaf-4eec-aae7-ef5406b956e2-log-httpd\") pod \"ceilometer-0\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.437717 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqwqg\" (UniqueName: \"kubernetes.io/projected/b57de619-6aaf-4eec-aae7-ef5406b956e2-kube-api-access-nqwqg\") pod \"ceilometer-0\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.437767 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b57de619-6aaf-4eec-aae7-ef5406b956e2-run-httpd\") pod \"ceilometer-0\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.437789 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.437811 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.437886 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-config-data\") pod \"ceilometer-0\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.437910 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.438291 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b57de619-6aaf-4eec-aae7-ef5406b956e2-run-httpd\") pod \"ceilometer-0\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.438442 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b57de619-6aaf-4eec-aae7-ef5406b956e2-log-httpd\") pod \"ceilometer-0\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.444532 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.444655 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-config-data\") pod \"ceilometer-0\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.445093 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.447311 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.447965 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-scripts\") pod \"ceilometer-0\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.453940 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqwqg\" (UniqueName: \"kubernetes.io/projected/b57de619-6aaf-4eec-aae7-ef5406b956e2-kube-api-access-nqwqg\") pod \"ceilometer-0\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.456855 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:20:43 crc kubenswrapper[4840]: I1205 15:20:43.914113 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:20:44 crc kubenswrapper[4840]: I1205 15:20:44.060435 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b57de619-6aaf-4eec-aae7-ef5406b956e2","Type":"ContainerStarted","Data":"8b4f5e8bbc68584a0936f5111fad16a5b5b37ad53cdc03ca88129ba06537ee21"} Dec 05 15:20:44 crc kubenswrapper[4840]: I1205 15:20:44.077441 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b99404d9-830e-4e41-ac6d-74f02a5bc179" path="/var/lib/kubelet/pods/b99404d9-830e-4e41-ac6d-74f02a5bc179/volumes" Dec 05 15:20:44 crc kubenswrapper[4840]: I1205 15:20:44.258131 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="fcfed2c6-10b9-4745-b2c3-b461eab145bb" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.195:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 15:20:44 crc kubenswrapper[4840]: I1205 15:20:44.258425 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="fcfed2c6-10b9-4745-b2c3-b461eab145bb" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.195:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 15:20:45 crc kubenswrapper[4840]: I1205 15:20:45.071065 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b57de619-6aaf-4eec-aae7-ef5406b956e2","Type":"ContainerStarted","Data":"76d4e57af5dcf7f638c14aff493e0d9283251afdcd9d65a71cd9bb37c6ad2b63"} Dec 05 15:20:46 crc kubenswrapper[4840]: I1205 15:20:46.088753 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b57de619-6aaf-4eec-aae7-ef5406b956e2","Type":"ContainerStarted","Data":"c612adbaf8efa7c0ffa092f7fcae060c89ee20f110cab0975c818ac7f3c8f9ae"} Dec 05 15:20:47 crc kubenswrapper[4840]: I1205 15:20:47.132823 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b57de619-6aaf-4eec-aae7-ef5406b956e2","Type":"ContainerStarted","Data":"d62d33b21fa4fc418d14cc07576a938dd10d5d12dc0cb4b42567056055c94c6b"} Dec 05 15:20:49 crc kubenswrapper[4840]: I1205 15:20:49.153334 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b57de619-6aaf-4eec-aae7-ef5406b956e2","Type":"ContainerStarted","Data":"f4f721ee8e871dcc1d3b1a50fbc97040bb1085127891629a2d66cfe00dc338e2"} Dec 05 15:20:49 crc kubenswrapper[4840]: I1205 15:20:49.153881 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 15:20:49 crc kubenswrapper[4840]: I1205 15:20:49.173484 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.053610074 podStartE2EDuration="6.173460051s" podCreationTimestamp="2025-12-05 15:20:43 +0000 UTC" firstStartedPulling="2025-12-05 15:20:43.928027058 +0000 UTC m=+1322.269089672" lastFinishedPulling="2025-12-05 15:20:48.047877035 +0000 UTC m=+1326.388939649" observedRunningTime="2025-12-05 15:20:49.171699371 +0000 UTC m=+1327.512761985" watchObservedRunningTime="2025-12-05 15:20:49.173460051 +0000 UTC m=+1327.514522685" Dec 05 15:20:49 crc kubenswrapper[4840]: I1205 15:20:49.471603 4840 patch_prober.go:28] interesting 
pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:20:49 crc kubenswrapper[4840]: I1205 15:20:49.471682 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:20:49 crc kubenswrapper[4840]: I1205 15:20:49.510244 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.050722 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.061662 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.166836 4840 generic.go:334] "Generic (PLEG): container finished" podID="600817fb-be86-4f40-a561-be53138f62d6" containerID="ee91b53247aed487aebe3a948b35a9d7b25876796749f98d6bbfeb08c69e866f" exitCode=137 Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.166910 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"600817fb-be86-4f40-a561-be53138f62d6","Type":"ContainerDied","Data":"ee91b53247aed487aebe3a948b35a9d7b25876796749f98d6bbfeb08c69e866f"} Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.166944 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.166981 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"600817fb-be86-4f40-a561-be53138f62d6","Type":"ContainerDied","Data":"24961cb45e569f6f9319906ec40f9050364e02132ff89bf42dbc432479d1f232"} Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.167001 4840 scope.go:117] "RemoveContainer" containerID="ee91b53247aed487aebe3a948b35a9d7b25876796749f98d6bbfeb08c69e866f" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.171509 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/600817fb-be86-4f40-a561-be53138f62d6-config-data\") pod \"600817fb-be86-4f40-a561-be53138f62d6\" (UID: \"600817fb-be86-4f40-a561-be53138f62d6\") " Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.171634 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dcsjz\" (UniqueName: \"kubernetes.io/projected/990a48c2-69ad-47c0-a3ce-ee9621c27074-kube-api-access-dcsjz\") pod \"990a48c2-69ad-47c0-a3ce-ee9621c27074\" (UID: \"990a48c2-69ad-47c0-a3ce-ee9621c27074\") " Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.171709 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/990a48c2-69ad-47c0-a3ce-ee9621c27074-logs\") pod \"990a48c2-69ad-47c0-a3ce-ee9621c27074\" (UID: \"990a48c2-69ad-47c0-a3ce-ee9621c27074\") " Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.171787 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/990a48c2-69ad-47c0-a3ce-ee9621c27074-config-data\") pod \"990a48c2-69ad-47c0-a3ce-ee9621c27074\" (UID: \"990a48c2-69ad-47c0-a3ce-ee9621c27074\") " Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.171817 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/990a48c2-69ad-47c0-a3ce-ee9621c27074-combined-ca-bundle\") pod \"990a48c2-69ad-47c0-a3ce-ee9621c27074\" (UID: \"990a48c2-69ad-47c0-a3ce-ee9621c27074\") " Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.172060 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/600817fb-be86-4f40-a561-be53138f62d6-combined-ca-bundle\") pod \"600817fb-be86-4f40-a561-be53138f62d6\" (UID: \"600817fb-be86-4f40-a561-be53138f62d6\") " Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.172087 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nqntk\" (UniqueName: \"kubernetes.io/projected/600817fb-be86-4f40-a561-be53138f62d6-kube-api-access-nqntk\") pod \"600817fb-be86-4f40-a561-be53138f62d6\" (UID: \"600817fb-be86-4f40-a561-be53138f62d6\") " Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.174378 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/990a48c2-69ad-47c0-a3ce-ee9621c27074-logs" (OuterVolumeSpecName: "logs") pod "990a48c2-69ad-47c0-a3ce-ee9621c27074" (UID: "990a48c2-69ad-47c0-a3ce-ee9621c27074"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.175802 4840 generic.go:334] "Generic (PLEG): container finished" podID="990a48c2-69ad-47c0-a3ce-ee9621c27074" containerID="3ea6cfee348760780b72c3e2d4d17b2e20a3a26784542bef08829f45be7a9fb0" exitCode=137 Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.175893 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.176048 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"990a48c2-69ad-47c0-a3ce-ee9621c27074","Type":"ContainerDied","Data":"3ea6cfee348760780b72c3e2d4d17b2e20a3a26784542bef08829f45be7a9fb0"} Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.176101 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"990a48c2-69ad-47c0-a3ce-ee9621c27074","Type":"ContainerDied","Data":"5e3d8012f64544c06c199025250a752293d56efcab5e1127a84d0e0ee537c18e"} Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.193089 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/990a48c2-69ad-47c0-a3ce-ee9621c27074-kube-api-access-dcsjz" (OuterVolumeSpecName: "kube-api-access-dcsjz") pod "990a48c2-69ad-47c0-a3ce-ee9621c27074" (UID: "990a48c2-69ad-47c0-a3ce-ee9621c27074"). InnerVolumeSpecName "kube-api-access-dcsjz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.196029 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/600817fb-be86-4f40-a561-be53138f62d6-kube-api-access-nqntk" (OuterVolumeSpecName: "kube-api-access-nqntk") pod "600817fb-be86-4f40-a561-be53138f62d6" (UID: "600817fb-be86-4f40-a561-be53138f62d6"). InnerVolumeSpecName "kube-api-access-nqntk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.197120 4840 scope.go:117] "RemoveContainer" containerID="ee91b53247aed487aebe3a948b35a9d7b25876796749f98d6bbfeb08c69e866f" Dec 05 15:20:50 crc kubenswrapper[4840]: E1205 15:20:50.197650 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee91b53247aed487aebe3a948b35a9d7b25876796749f98d6bbfeb08c69e866f\": container with ID starting with ee91b53247aed487aebe3a948b35a9d7b25876796749f98d6bbfeb08c69e866f not found: ID does not exist" containerID="ee91b53247aed487aebe3a948b35a9d7b25876796749f98d6bbfeb08c69e866f" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.197678 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee91b53247aed487aebe3a948b35a9d7b25876796749f98d6bbfeb08c69e866f"} err="failed to get container status \"ee91b53247aed487aebe3a948b35a9d7b25876796749f98d6bbfeb08c69e866f\": rpc error: code = NotFound desc = could not find container \"ee91b53247aed487aebe3a948b35a9d7b25876796749f98d6bbfeb08c69e866f\": container with ID starting with ee91b53247aed487aebe3a948b35a9d7b25876796749f98d6bbfeb08c69e866f not found: ID does not exist" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.197700 4840 scope.go:117] "RemoveContainer" containerID="3ea6cfee348760780b72c3e2d4d17b2e20a3a26784542bef08829f45be7a9fb0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.209145 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/990a48c2-69ad-47c0-a3ce-ee9621c27074-config-data" (OuterVolumeSpecName: "config-data") pod "990a48c2-69ad-47c0-a3ce-ee9621c27074" (UID: "990a48c2-69ad-47c0-a3ce-ee9621c27074"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.218574 4840 scope.go:117] "RemoveContainer" containerID="626d8cbb3c16b2a74be9b59dc115bc8a9293fed9dc15136c912802c86b9abc87" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.222461 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/600817fb-be86-4f40-a561-be53138f62d6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "600817fb-be86-4f40-a561-be53138f62d6" (UID: "600817fb-be86-4f40-a561-be53138f62d6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.224956 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/990a48c2-69ad-47c0-a3ce-ee9621c27074-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "990a48c2-69ad-47c0-a3ce-ee9621c27074" (UID: "990a48c2-69ad-47c0-a3ce-ee9621c27074"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.250092 4840 scope.go:117] "RemoveContainer" containerID="3ea6cfee348760780b72c3e2d4d17b2e20a3a26784542bef08829f45be7a9fb0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.250096 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/600817fb-be86-4f40-a561-be53138f62d6-config-data" (OuterVolumeSpecName: "config-data") pod "600817fb-be86-4f40-a561-be53138f62d6" (UID: "600817fb-be86-4f40-a561-be53138f62d6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:50 crc kubenswrapper[4840]: E1205 15:20:50.250527 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ea6cfee348760780b72c3e2d4d17b2e20a3a26784542bef08829f45be7a9fb0\": container with ID starting with 3ea6cfee348760780b72c3e2d4d17b2e20a3a26784542bef08829f45be7a9fb0 not found: ID does not exist" containerID="3ea6cfee348760780b72c3e2d4d17b2e20a3a26784542bef08829f45be7a9fb0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.250572 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ea6cfee348760780b72c3e2d4d17b2e20a3a26784542bef08829f45be7a9fb0"} err="failed to get container status \"3ea6cfee348760780b72c3e2d4d17b2e20a3a26784542bef08829f45be7a9fb0\": rpc error: code = NotFound desc = could not find container \"3ea6cfee348760780b72c3e2d4d17b2e20a3a26784542bef08829f45be7a9fb0\": container with ID starting with 3ea6cfee348760780b72c3e2d4d17b2e20a3a26784542bef08829f45be7a9fb0 not found: ID does not exist" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.250599 4840 scope.go:117] "RemoveContainer" containerID="626d8cbb3c16b2a74be9b59dc115bc8a9293fed9dc15136c912802c86b9abc87" Dec 05 15:20:50 crc kubenswrapper[4840]: E1205 15:20:50.251006 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"626d8cbb3c16b2a74be9b59dc115bc8a9293fed9dc15136c912802c86b9abc87\": container with ID starting with 626d8cbb3c16b2a74be9b59dc115bc8a9293fed9dc15136c912802c86b9abc87 not found: ID does not exist" containerID="626d8cbb3c16b2a74be9b59dc115bc8a9293fed9dc15136c912802c86b9abc87" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.251041 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"626d8cbb3c16b2a74be9b59dc115bc8a9293fed9dc15136c912802c86b9abc87"} err="failed to get container status \"626d8cbb3c16b2a74be9b59dc115bc8a9293fed9dc15136c912802c86b9abc87\": rpc error: code = NotFound desc = could not find container \"626d8cbb3c16b2a74be9b59dc115bc8a9293fed9dc15136c912802c86b9abc87\": container with ID starting with 626d8cbb3c16b2a74be9b59dc115bc8a9293fed9dc15136c912802c86b9abc87 not found: ID does not exist" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.274608 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/600817fb-be86-4f40-a561-be53138f62d6-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.274652 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dcsjz\" (UniqueName: \"kubernetes.io/projected/990a48c2-69ad-47c0-a3ce-ee9621c27074-kube-api-access-dcsjz\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.274668 4840 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/990a48c2-69ad-47c0-a3ce-ee9621c27074-logs\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.274679 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/990a48c2-69ad-47c0-a3ce-ee9621c27074-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.274691 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/990a48c2-69ad-47c0-a3ce-ee9621c27074-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.274703 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/600817fb-be86-4f40-a561-be53138f62d6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.274712 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nqntk\" (UniqueName: \"kubernetes.io/projected/600817fb-be86-4f40-a561-be53138f62d6-kube-api-access-nqntk\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.505238 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.514499 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.528781 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 15:20:50 crc kubenswrapper[4840]: E1205 15:20:50.529269 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="600817fb-be86-4f40-a561-be53138f62d6" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.529290 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="600817fb-be86-4f40-a561-be53138f62d6" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 15:20:50 crc kubenswrapper[4840]: E1205 15:20:50.529326 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="990a48c2-69ad-47c0-a3ce-ee9621c27074" containerName="nova-metadata-log" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.529335 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="990a48c2-69ad-47c0-a3ce-ee9621c27074" containerName="nova-metadata-log" Dec 05 15:20:50 crc kubenswrapper[4840]: E1205 15:20:50.529352 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="990a48c2-69ad-47c0-a3ce-ee9621c27074" containerName="nova-metadata-metadata" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.529360 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="990a48c2-69ad-47c0-a3ce-ee9621c27074" containerName="nova-metadata-metadata" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.529569 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="600817fb-be86-4f40-a561-be53138f62d6" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.529582 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="990a48c2-69ad-47c0-a3ce-ee9621c27074" containerName="nova-metadata-log" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.529605 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="990a48c2-69ad-47c0-a3ce-ee9621c27074" containerName="nova-metadata-metadata" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.530271 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.533622 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.534282 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.535272 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.541492 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.555030 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.568913 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.589775 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5bca6a7-9654-492d-9687-e7672c18117f-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"c5bca6a7-9654-492d-9687-e7672c18117f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.589838 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5bca6a7-9654-492d-9687-e7672c18117f-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c5bca6a7-9654-492d-9687-e7672c18117f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.589882 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqkmn\" (UniqueName: \"kubernetes.io/projected/c5bca6a7-9654-492d-9687-e7672c18117f-kube-api-access-pqkmn\") pod \"nova-cell1-novncproxy-0\" (UID: \"c5bca6a7-9654-492d-9687-e7672c18117f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.589941 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5bca6a7-9654-492d-9687-e7672c18117f-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"c5bca6a7-9654-492d-9687-e7672c18117f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.589968 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5bca6a7-9654-492d-9687-e7672c18117f-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c5bca6a7-9654-492d-9687-e7672c18117f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.643953 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.647122 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.650103 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.650111 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.656131 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.692109 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\") " pod="openstack/nova-metadata-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.692262 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hntvp\" (UniqueName: \"kubernetes.io/projected/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-kube-api-access-hntvp\") pod \"nova-metadata-0\" (UID: \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\") " pod="openstack/nova-metadata-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.692465 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5bca6a7-9654-492d-9687-e7672c18117f-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"c5bca6a7-9654-492d-9687-e7672c18117f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.692503 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5bca6a7-9654-492d-9687-e7672c18117f-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c5bca6a7-9654-492d-9687-e7672c18117f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.692521 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqkmn\" (UniqueName: \"kubernetes.io/projected/c5bca6a7-9654-492d-9687-e7672c18117f-kube-api-access-pqkmn\") pod \"nova-cell1-novncproxy-0\" (UID: \"c5bca6a7-9654-492d-9687-e7672c18117f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.692621 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-config-data\") pod \"nova-metadata-0\" (UID: \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\") " pod="openstack/nova-metadata-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.692649 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5bca6a7-9654-492d-9687-e7672c18117f-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"c5bca6a7-9654-492d-9687-e7672c18117f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.692677 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5bca6a7-9654-492d-9687-e7672c18117f-vencrypt-tls-certs\") pod 
\"nova-cell1-novncproxy-0\" (UID: \"c5bca6a7-9654-492d-9687-e7672c18117f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.692727 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\") " pod="openstack/nova-metadata-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.692859 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-logs\") pod \"nova-metadata-0\" (UID: \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\") " pod="openstack/nova-metadata-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.699183 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5bca6a7-9654-492d-9687-e7672c18117f-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"c5bca6a7-9654-492d-9687-e7672c18117f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.699346 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5bca6a7-9654-492d-9687-e7672c18117f-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"c5bca6a7-9654-492d-9687-e7672c18117f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.702894 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5bca6a7-9654-492d-9687-e7672c18117f-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c5bca6a7-9654-492d-9687-e7672c18117f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.703916 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5bca6a7-9654-492d-9687-e7672c18117f-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c5bca6a7-9654-492d-9687-e7672c18117f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.717445 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqkmn\" (UniqueName: \"kubernetes.io/projected/c5bca6a7-9654-492d-9687-e7672c18117f-kube-api-access-pqkmn\") pod \"nova-cell1-novncproxy-0\" (UID: \"c5bca6a7-9654-492d-9687-e7672c18117f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.795015 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\") " pod="openstack/nova-metadata-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.795083 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hntvp\" (UniqueName: \"kubernetes.io/projected/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-kube-api-access-hntvp\") pod \"nova-metadata-0\" (UID: \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\") " pod="openstack/nova-metadata-0" Dec 05 15:20:50 crc kubenswrapper[4840]: 
I1205 15:20:50.795168 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-config-data\") pod \"nova-metadata-0\" (UID: \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\") " pod="openstack/nova-metadata-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.795195 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\") " pod="openstack/nova-metadata-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.795248 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-logs\") pod \"nova-metadata-0\" (UID: \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\") " pod="openstack/nova-metadata-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.795782 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-logs\") pod \"nova-metadata-0\" (UID: \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\") " pod="openstack/nova-metadata-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.800062 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\") " pod="openstack/nova-metadata-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.801415 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\") " pod="openstack/nova-metadata-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.805559 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-config-data\") pod \"nova-metadata-0\" (UID: \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\") " pod="openstack/nova-metadata-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.816699 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hntvp\" (UniqueName: \"kubernetes.io/projected/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-kube-api-access-hntvp\") pod \"nova-metadata-0\" (UID: \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\") " pod="openstack/nova-metadata-0" Dec 05 15:20:50 crc kubenswrapper[4840]: I1205 15:20:50.850926 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:51 crc kubenswrapper[4840]: I1205 15:20:51.100579 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 15:20:51 crc kubenswrapper[4840]: I1205 15:20:51.296317 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 15:20:51 crc kubenswrapper[4840]: I1205 15:20:51.566048 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 15:20:51 crc kubenswrapper[4840]: W1205 15:20:51.570145 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1c301a0e_a0bd_4ab4_b1e1_8d105434d534.slice/crio-6eba4c592bbf5d70b108060ad55cf4fd050ea663254c1c26cb7fd8028aa7596d WatchSource:0}: Error finding container 6eba4c592bbf5d70b108060ad55cf4fd050ea663254c1c26cb7fd8028aa7596d: Status 404 returned error can't find the container with id 6eba4c592bbf5d70b108060ad55cf4fd050ea663254c1c26cb7fd8028aa7596d Dec 05 15:20:52 crc kubenswrapper[4840]: I1205 15:20:52.088202 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="600817fb-be86-4f40-a561-be53138f62d6" path="/var/lib/kubelet/pods/600817fb-be86-4f40-a561-be53138f62d6/volumes" Dec 05 15:20:52 crc kubenswrapper[4840]: I1205 15:20:52.089505 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="990a48c2-69ad-47c0-a3ce-ee9621c27074" path="/var/lib/kubelet/pods/990a48c2-69ad-47c0-a3ce-ee9621c27074/volumes" Dec 05 15:20:52 crc kubenswrapper[4840]: I1205 15:20:52.209855 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1c301a0e-a0bd-4ab4-b1e1-8d105434d534","Type":"ContainerStarted","Data":"ab75ea0e9cf26282e92727ba6687b5e2fc8c2193c4c417791b70f3d73fe49a38"} Dec 05 15:20:52 crc kubenswrapper[4840]: I1205 15:20:52.210100 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1c301a0e-a0bd-4ab4-b1e1-8d105434d534","Type":"ContainerStarted","Data":"c1c139fe6eda54ff1d01fd0ccf4c5c90e95428e38b6cf88549573326b04f079c"} Dec 05 15:20:52 crc kubenswrapper[4840]: I1205 15:20:52.210111 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1c301a0e-a0bd-4ab4-b1e1-8d105434d534","Type":"ContainerStarted","Data":"6eba4c592bbf5d70b108060ad55cf4fd050ea663254c1c26cb7fd8028aa7596d"} Dec 05 15:20:52 crc kubenswrapper[4840]: I1205 15:20:52.211556 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"c5bca6a7-9654-492d-9687-e7672c18117f","Type":"ContainerStarted","Data":"c913cee7ec4a7e7685c5a031fc051740437d772ab47891161ce7545c9f1f4291"} Dec 05 15:20:52 crc kubenswrapper[4840]: I1205 15:20:52.211601 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"c5bca6a7-9654-492d-9687-e7672c18117f","Type":"ContainerStarted","Data":"23642c02418fae16ac539a91437076ea9a9143fd1851b6a55931253a252ae84a"} Dec 05 15:20:52 crc kubenswrapper[4840]: I1205 15:20:52.242042 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.242018823 podStartE2EDuration="2.242018823s" podCreationTimestamp="2025-12-05 15:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:20:52.234846351 +0000 UTC m=+1330.575908965" watchObservedRunningTime="2025-12-05 15:20:52.242018823 +0000 UTC m=+1330.583081437" Dec 05 15:20:52 crc kubenswrapper[4840]: I1205 15:20:52.269709 
4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.269688182 podStartE2EDuration="2.269688182s" podCreationTimestamp="2025-12-05 15:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:20:52.266916924 +0000 UTC m=+1330.607979558" watchObservedRunningTime="2025-12-05 15:20:52.269688182 +0000 UTC m=+1330.610750796" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.174518 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.175154 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.175939 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.178187 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.222960 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.227261 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.422018 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-jq7m4"] Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.423766 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.439940 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-jq7m4"] Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.465594 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gkqq8\" (UniqueName: \"kubernetes.io/projected/7c96306d-162b-44be-8dfb-fea1280e5644-kube-api-access-gkqq8\") pod \"dnsmasq-dns-5c7b6c5df9-jq7m4\" (UID: \"7c96306d-162b-44be-8dfb-fea1280e5644\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.465656 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-jq7m4\" (UID: \"7c96306d-162b-44be-8dfb-fea1280e5644\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.465718 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-jq7m4\" (UID: \"7c96306d-162b-44be-8dfb-fea1280e5644\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.465781 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-jq7m4\" (UID: 
\"7c96306d-162b-44be-8dfb-fea1280e5644\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.465800 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-jq7m4\" (UID: \"7c96306d-162b-44be-8dfb-fea1280e5644\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.465838 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-config\") pod \"dnsmasq-dns-5c7b6c5df9-jq7m4\" (UID: \"7c96306d-162b-44be-8dfb-fea1280e5644\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.567464 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gkqq8\" (UniqueName: \"kubernetes.io/projected/7c96306d-162b-44be-8dfb-fea1280e5644-kube-api-access-gkqq8\") pod \"dnsmasq-dns-5c7b6c5df9-jq7m4\" (UID: \"7c96306d-162b-44be-8dfb-fea1280e5644\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.567515 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-jq7m4\" (UID: \"7c96306d-162b-44be-8dfb-fea1280e5644\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.567643 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-jq7m4\" (UID: \"7c96306d-162b-44be-8dfb-fea1280e5644\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.568673 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-jq7m4\" (UID: \"7c96306d-162b-44be-8dfb-fea1280e5644\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.568152 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-jq7m4\" (UID: \"7c96306d-162b-44be-8dfb-fea1280e5644\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.568764 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-jq7m4\" (UID: \"7c96306d-162b-44be-8dfb-fea1280e5644\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.568772 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-jq7m4\" (UID: \"7c96306d-162b-44be-8dfb-fea1280e5644\") 
" pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.568944 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-config\") pod \"dnsmasq-dns-5c7b6c5df9-jq7m4\" (UID: \"7c96306d-162b-44be-8dfb-fea1280e5644\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.568986 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-jq7m4\" (UID: \"7c96306d-162b-44be-8dfb-fea1280e5644\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.569410 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-jq7m4\" (UID: \"7c96306d-162b-44be-8dfb-fea1280e5644\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.569674 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-config\") pod \"dnsmasq-dns-5c7b6c5df9-jq7m4\" (UID: \"7c96306d-162b-44be-8dfb-fea1280e5644\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.585700 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gkqq8\" (UniqueName: \"kubernetes.io/projected/7c96306d-162b-44be-8dfb-fea1280e5644-kube-api-access-gkqq8\") pod \"dnsmasq-dns-5c7b6c5df9-jq7m4\" (UID: \"7c96306d-162b-44be-8dfb-fea1280e5644\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" Dec 05 15:20:53 crc kubenswrapper[4840]: I1205 15:20:53.756207 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" Dec 05 15:20:54 crc kubenswrapper[4840]: I1205 15:20:54.300994 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-jq7m4"] Dec 05 15:20:55 crc kubenswrapper[4840]: I1205 15:20:55.270159 4840 generic.go:334] "Generic (PLEG): container finished" podID="7c96306d-162b-44be-8dfb-fea1280e5644" containerID="cd46a90e28eb02c64db85e8de9359cb7b8fbb098d84c8f5e39a5b452ac9f33f5" exitCode=0 Dec 05 15:20:55 crc kubenswrapper[4840]: I1205 15:20:55.270254 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" event={"ID":"7c96306d-162b-44be-8dfb-fea1280e5644","Type":"ContainerDied","Data":"cd46a90e28eb02c64db85e8de9359cb7b8fbb098d84c8f5e39a5b452ac9f33f5"} Dec 05 15:20:55 crc kubenswrapper[4840]: I1205 15:20:55.270666 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" event={"ID":"7c96306d-162b-44be-8dfb-fea1280e5644","Type":"ContainerStarted","Data":"732a72316b87c5bd8eb38413a3e0a233d5001e668ab247bb5c9283f69441cdec"} Dec 05 15:20:55 crc kubenswrapper[4840]: I1205 15:20:55.647823 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 15:20:55 crc kubenswrapper[4840]: I1205 15:20:55.851716 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:20:56 crc kubenswrapper[4840]: I1205 15:20:56.102018 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 15:20:56 crc kubenswrapper[4840]: I1205 15:20:56.102929 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 15:20:56 crc kubenswrapper[4840]: I1205 15:20:56.284725 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" event={"ID":"7c96306d-162b-44be-8dfb-fea1280e5644","Type":"ContainerStarted","Data":"0f7213ca076b969b3fc05dc52b961da27bf62ba6f9355abc17636d312dadc0d6"} Dec 05 15:20:56 crc kubenswrapper[4840]: I1205 15:20:56.284949 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="fcfed2c6-10b9-4745-b2c3-b461eab145bb" containerName="nova-api-log" containerID="cri-o://11c70fe994411d828878bb665b2110adcd11f5ace3384cbfb4dbf438407c1e56" gracePeriod=30 Dec 05 15:20:56 crc kubenswrapper[4840]: I1205 15:20:56.285126 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" Dec 05 15:20:56 crc kubenswrapper[4840]: I1205 15:20:56.285198 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="fcfed2c6-10b9-4745-b2c3-b461eab145bb" containerName="nova-api-api" containerID="cri-o://5ca616b17146e61f90b43d9b96f90f78ea5ec8c548089e014cee78c20a5810e0" gracePeriod=30 Dec 05 15:20:56 crc kubenswrapper[4840]: I1205 15:20:56.322841 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" podStartSLOduration=3.322817831 podStartE2EDuration="3.322817831s" podCreationTimestamp="2025-12-05 15:20:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:20:56.299578187 +0000 UTC m=+1334.640640801" watchObservedRunningTime="2025-12-05 15:20:56.322817831 +0000 UTC m=+1334.663880445" Dec 05 15:20:56 crc kubenswrapper[4840]: 
I1205 15:20:56.628004 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:20:56 crc kubenswrapper[4840]: I1205 15:20:56.629666 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b57de619-6aaf-4eec-aae7-ef5406b956e2" containerName="ceilometer-central-agent" containerID="cri-o://76d4e57af5dcf7f638c14aff493e0d9283251afdcd9d65a71cd9bb37c6ad2b63" gracePeriod=30 Dec 05 15:20:56 crc kubenswrapper[4840]: I1205 15:20:56.629844 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b57de619-6aaf-4eec-aae7-ef5406b956e2" containerName="sg-core" containerID="cri-o://d62d33b21fa4fc418d14cc07576a938dd10d5d12dc0cb4b42567056055c94c6b" gracePeriod=30 Dec 05 15:20:56 crc kubenswrapper[4840]: I1205 15:20:56.629937 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b57de619-6aaf-4eec-aae7-ef5406b956e2" containerName="ceilometer-notification-agent" containerID="cri-o://c612adbaf8efa7c0ffa092f7fcae060c89ee20f110cab0975c818ac7f3c8f9ae" gracePeriod=30 Dec 05 15:20:56 crc kubenswrapper[4840]: I1205 15:20:56.629879 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b57de619-6aaf-4eec-aae7-ef5406b956e2" containerName="proxy-httpd" containerID="cri-o://f4f721ee8e871dcc1d3b1a50fbc97040bb1085127891629a2d66cfe00dc338e2" gracePeriod=30 Dec 05 15:20:57 crc kubenswrapper[4840]: I1205 15:20:57.309077 4840 generic.go:334] "Generic (PLEG): container finished" podID="b57de619-6aaf-4eec-aae7-ef5406b956e2" containerID="f4f721ee8e871dcc1d3b1a50fbc97040bb1085127891629a2d66cfe00dc338e2" exitCode=0 Dec 05 15:20:57 crc kubenswrapper[4840]: I1205 15:20:57.309116 4840 generic.go:334] "Generic (PLEG): container finished" podID="b57de619-6aaf-4eec-aae7-ef5406b956e2" containerID="d62d33b21fa4fc418d14cc07576a938dd10d5d12dc0cb4b42567056055c94c6b" exitCode=2 Dec 05 15:20:57 crc kubenswrapper[4840]: I1205 15:20:57.309132 4840 generic.go:334] "Generic (PLEG): container finished" podID="b57de619-6aaf-4eec-aae7-ef5406b956e2" containerID="76d4e57af5dcf7f638c14aff493e0d9283251afdcd9d65a71cd9bb37c6ad2b63" exitCode=0 Dec 05 15:20:57 crc kubenswrapper[4840]: I1205 15:20:57.309188 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b57de619-6aaf-4eec-aae7-ef5406b956e2","Type":"ContainerDied","Data":"f4f721ee8e871dcc1d3b1a50fbc97040bb1085127891629a2d66cfe00dc338e2"} Dec 05 15:20:57 crc kubenswrapper[4840]: I1205 15:20:57.309220 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b57de619-6aaf-4eec-aae7-ef5406b956e2","Type":"ContainerDied","Data":"d62d33b21fa4fc418d14cc07576a938dd10d5d12dc0cb4b42567056055c94c6b"} Dec 05 15:20:57 crc kubenswrapper[4840]: I1205 15:20:57.309233 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b57de619-6aaf-4eec-aae7-ef5406b956e2","Type":"ContainerDied","Data":"76d4e57af5dcf7f638c14aff493e0d9283251afdcd9d65a71cd9bb37c6ad2b63"} Dec 05 15:20:57 crc kubenswrapper[4840]: I1205 15:20:57.311708 4840 generic.go:334] "Generic (PLEG): container finished" podID="fcfed2c6-10b9-4745-b2c3-b461eab145bb" containerID="11c70fe994411d828878bb665b2110adcd11f5ace3384cbfb4dbf438407c1e56" exitCode=143 Dec 05 15:20:57 crc kubenswrapper[4840]: I1205 15:20:57.311777 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-api-0" event={"ID":"fcfed2c6-10b9-4745-b2c3-b461eab145bb","Type":"ContainerDied","Data":"11c70fe994411d828878bb665b2110adcd11f5ace3384cbfb4dbf438407c1e56"} Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.725240 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.780573 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b57de619-6aaf-4eec-aae7-ef5406b956e2-run-httpd\") pod \"b57de619-6aaf-4eec-aae7-ef5406b956e2\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.780657 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-combined-ca-bundle\") pod \"b57de619-6aaf-4eec-aae7-ef5406b956e2\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.780692 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-ceilometer-tls-certs\") pod \"b57de619-6aaf-4eec-aae7-ef5406b956e2\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.780716 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b57de619-6aaf-4eec-aae7-ef5406b956e2-log-httpd\") pod \"b57de619-6aaf-4eec-aae7-ef5406b956e2\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.780816 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-scripts\") pod \"b57de619-6aaf-4eec-aae7-ef5406b956e2\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.780905 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nqwqg\" (UniqueName: \"kubernetes.io/projected/b57de619-6aaf-4eec-aae7-ef5406b956e2-kube-api-access-nqwqg\") pod \"b57de619-6aaf-4eec-aae7-ef5406b956e2\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.780985 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-sg-core-conf-yaml\") pod \"b57de619-6aaf-4eec-aae7-ef5406b956e2\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.781264 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-config-data\") pod \"b57de619-6aaf-4eec-aae7-ef5406b956e2\" (UID: \"b57de619-6aaf-4eec-aae7-ef5406b956e2\") " Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.781025 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b57de619-6aaf-4eec-aae7-ef5406b956e2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b57de619-6aaf-4eec-aae7-ef5406b956e2" (UID: "b57de619-6aaf-4eec-aae7-ef5406b956e2"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.784232 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b57de619-6aaf-4eec-aae7-ef5406b956e2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b57de619-6aaf-4eec-aae7-ef5406b956e2" (UID: "b57de619-6aaf-4eec-aae7-ef5406b956e2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.789042 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-scripts" (OuterVolumeSpecName: "scripts") pod "b57de619-6aaf-4eec-aae7-ef5406b956e2" (UID: "b57de619-6aaf-4eec-aae7-ef5406b956e2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.793067 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b57de619-6aaf-4eec-aae7-ef5406b956e2-kube-api-access-nqwqg" (OuterVolumeSpecName: "kube-api-access-nqwqg") pod "b57de619-6aaf-4eec-aae7-ef5406b956e2" (UID: "b57de619-6aaf-4eec-aae7-ef5406b956e2"). InnerVolumeSpecName "kube-api-access-nqwqg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.834793 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b57de619-6aaf-4eec-aae7-ef5406b956e2" (UID: "b57de619-6aaf-4eec-aae7-ef5406b956e2"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.853547 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "b57de619-6aaf-4eec-aae7-ef5406b956e2" (UID: "b57de619-6aaf-4eec-aae7-ef5406b956e2"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.881323 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b57de619-6aaf-4eec-aae7-ef5406b956e2" (UID: "b57de619-6aaf-4eec-aae7-ef5406b956e2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.883743 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nqwqg\" (UniqueName: \"kubernetes.io/projected/b57de619-6aaf-4eec-aae7-ef5406b956e2-kube-api-access-nqwqg\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.883760 4840 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.883772 4840 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b57de619-6aaf-4eec-aae7-ef5406b956e2-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.883780 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.883790 4840 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.883799 4840 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b57de619-6aaf-4eec-aae7-ef5406b956e2-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.883806 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.893266 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-config-data" (OuterVolumeSpecName: "config-data") pod "b57de619-6aaf-4eec-aae7-ef5406b956e2" (UID: "b57de619-6aaf-4eec-aae7-ef5406b956e2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:58 crc kubenswrapper[4840]: I1205 15:20:58.985164 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b57de619-6aaf-4eec-aae7-ef5406b956e2-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.330662 4840 generic.go:334] "Generic (PLEG): container finished" podID="b57de619-6aaf-4eec-aae7-ef5406b956e2" containerID="c612adbaf8efa7c0ffa092f7fcae060c89ee20f110cab0975c818ac7f3c8f9ae" exitCode=0 Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.330702 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b57de619-6aaf-4eec-aae7-ef5406b956e2","Type":"ContainerDied","Data":"c612adbaf8efa7c0ffa092f7fcae060c89ee20f110cab0975c818ac7f3c8f9ae"} Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.330727 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b57de619-6aaf-4eec-aae7-ef5406b956e2","Type":"ContainerDied","Data":"8b4f5e8bbc68584a0936f5111fad16a5b5b37ad53cdc03ca88129ba06537ee21"} Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.330743 4840 scope.go:117] "RemoveContainer" containerID="f4f721ee8e871dcc1d3b1a50fbc97040bb1085127891629a2d66cfe00dc338e2" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.330859 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.360174 4840 scope.go:117] "RemoveContainer" containerID="d62d33b21fa4fc418d14cc07576a938dd10d5d12dc0cb4b42567056055c94c6b" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.387595 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.407164 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.422920 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:20:59 crc kubenswrapper[4840]: E1205 15:20:59.423553 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b57de619-6aaf-4eec-aae7-ef5406b956e2" containerName="ceilometer-central-agent" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.423633 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="b57de619-6aaf-4eec-aae7-ef5406b956e2" containerName="ceilometer-central-agent" Dec 05 15:20:59 crc kubenswrapper[4840]: E1205 15:20:59.423705 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b57de619-6aaf-4eec-aae7-ef5406b956e2" containerName="sg-core" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.423757 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="b57de619-6aaf-4eec-aae7-ef5406b956e2" containerName="sg-core" Dec 05 15:20:59 crc kubenswrapper[4840]: E1205 15:20:59.423841 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b57de619-6aaf-4eec-aae7-ef5406b956e2" containerName="proxy-httpd" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.423926 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="b57de619-6aaf-4eec-aae7-ef5406b956e2" containerName="proxy-httpd" Dec 05 15:20:59 crc kubenswrapper[4840]: E1205 15:20:59.424023 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b57de619-6aaf-4eec-aae7-ef5406b956e2" containerName="ceilometer-notification-agent" Dec 05 15:20:59 
crc kubenswrapper[4840]: I1205 15:20:59.424107 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="b57de619-6aaf-4eec-aae7-ef5406b956e2" containerName="ceilometer-notification-agent" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.424406 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="b57de619-6aaf-4eec-aae7-ef5406b956e2" containerName="ceilometer-notification-agent" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.424487 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="b57de619-6aaf-4eec-aae7-ef5406b956e2" containerName="sg-core" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.424571 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="b57de619-6aaf-4eec-aae7-ef5406b956e2" containerName="ceilometer-central-agent" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.424660 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="b57de619-6aaf-4eec-aae7-ef5406b956e2" containerName="proxy-httpd" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.427012 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.429036 4840 scope.go:117] "RemoveContainer" containerID="c612adbaf8efa7c0ffa092f7fcae060c89ee20f110cab0975c818ac7f3c8f9ae" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.429418 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.432462 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.433713 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.433893 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.459799 4840 scope.go:117] "RemoveContainer" containerID="76d4e57af5dcf7f638c14aff493e0d9283251afdcd9d65a71cd9bb37c6ad2b63" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.494550 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d48a78c-f903-4b87-8c99-622c71bac6aa-run-httpd\") pod \"ceilometer-0\" (UID: \"7d48a78c-f903-4b87-8c99-622c71bac6aa\") " pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.494647 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7d48a78c-f903-4b87-8c99-622c71bac6aa-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7d48a78c-f903-4b87-8c99-622c71bac6aa\") " pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.494701 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d48a78c-f903-4b87-8c99-622c71bac6aa-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7d48a78c-f903-4b87-8c99-622c71bac6aa\") " pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.494733 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/7d48a78c-f903-4b87-8c99-622c71bac6aa-log-httpd\") pod \"ceilometer-0\" (UID: \"7d48a78c-f903-4b87-8c99-622c71bac6aa\") " pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.494807 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d48a78c-f903-4b87-8c99-622c71bac6aa-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7d48a78c-f903-4b87-8c99-622c71bac6aa\") " pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.494830 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5jzc\" (UniqueName: \"kubernetes.io/projected/7d48a78c-f903-4b87-8c99-622c71bac6aa-kube-api-access-d5jzc\") pod \"ceilometer-0\" (UID: \"7d48a78c-f903-4b87-8c99-622c71bac6aa\") " pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.494857 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d48a78c-f903-4b87-8c99-622c71bac6aa-config-data\") pod \"ceilometer-0\" (UID: \"7d48a78c-f903-4b87-8c99-622c71bac6aa\") " pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.494894 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d48a78c-f903-4b87-8c99-622c71bac6aa-scripts\") pod \"ceilometer-0\" (UID: \"7d48a78c-f903-4b87-8c99-622c71bac6aa\") " pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.581437 4840 scope.go:117] "RemoveContainer" containerID="f4f721ee8e871dcc1d3b1a50fbc97040bb1085127891629a2d66cfe00dc338e2" Dec 05 15:20:59 crc kubenswrapper[4840]: E1205 15:20:59.581926 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4f721ee8e871dcc1d3b1a50fbc97040bb1085127891629a2d66cfe00dc338e2\": container with ID starting with f4f721ee8e871dcc1d3b1a50fbc97040bb1085127891629a2d66cfe00dc338e2 not found: ID does not exist" containerID="f4f721ee8e871dcc1d3b1a50fbc97040bb1085127891629a2d66cfe00dc338e2" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.581965 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4f721ee8e871dcc1d3b1a50fbc97040bb1085127891629a2d66cfe00dc338e2"} err="failed to get container status \"f4f721ee8e871dcc1d3b1a50fbc97040bb1085127891629a2d66cfe00dc338e2\": rpc error: code = NotFound desc = could not find container \"f4f721ee8e871dcc1d3b1a50fbc97040bb1085127891629a2d66cfe00dc338e2\": container with ID starting with f4f721ee8e871dcc1d3b1a50fbc97040bb1085127891629a2d66cfe00dc338e2 not found: ID does not exist" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.581990 4840 scope.go:117] "RemoveContainer" containerID="d62d33b21fa4fc418d14cc07576a938dd10d5d12dc0cb4b42567056055c94c6b" Dec 05 15:20:59 crc kubenswrapper[4840]: E1205 15:20:59.582234 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d62d33b21fa4fc418d14cc07576a938dd10d5d12dc0cb4b42567056055c94c6b\": container with ID starting with d62d33b21fa4fc418d14cc07576a938dd10d5d12dc0cb4b42567056055c94c6b not found: ID does not exist" 
containerID="d62d33b21fa4fc418d14cc07576a938dd10d5d12dc0cb4b42567056055c94c6b" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.582294 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d62d33b21fa4fc418d14cc07576a938dd10d5d12dc0cb4b42567056055c94c6b"} err="failed to get container status \"d62d33b21fa4fc418d14cc07576a938dd10d5d12dc0cb4b42567056055c94c6b\": rpc error: code = NotFound desc = could not find container \"d62d33b21fa4fc418d14cc07576a938dd10d5d12dc0cb4b42567056055c94c6b\": container with ID starting with d62d33b21fa4fc418d14cc07576a938dd10d5d12dc0cb4b42567056055c94c6b not found: ID does not exist" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.582313 4840 scope.go:117] "RemoveContainer" containerID="c612adbaf8efa7c0ffa092f7fcae060c89ee20f110cab0975c818ac7f3c8f9ae" Dec 05 15:20:59 crc kubenswrapper[4840]: E1205 15:20:59.584018 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c612adbaf8efa7c0ffa092f7fcae060c89ee20f110cab0975c818ac7f3c8f9ae\": container with ID starting with c612adbaf8efa7c0ffa092f7fcae060c89ee20f110cab0975c818ac7f3c8f9ae not found: ID does not exist" containerID="c612adbaf8efa7c0ffa092f7fcae060c89ee20f110cab0975c818ac7f3c8f9ae" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.584055 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c612adbaf8efa7c0ffa092f7fcae060c89ee20f110cab0975c818ac7f3c8f9ae"} err="failed to get container status \"c612adbaf8efa7c0ffa092f7fcae060c89ee20f110cab0975c818ac7f3c8f9ae\": rpc error: code = NotFound desc = could not find container \"c612adbaf8efa7c0ffa092f7fcae060c89ee20f110cab0975c818ac7f3c8f9ae\": container with ID starting with c612adbaf8efa7c0ffa092f7fcae060c89ee20f110cab0975c818ac7f3c8f9ae not found: ID does not exist" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.584068 4840 scope.go:117] "RemoveContainer" containerID="76d4e57af5dcf7f638c14aff493e0d9283251afdcd9d65a71cd9bb37c6ad2b63" Dec 05 15:20:59 crc kubenswrapper[4840]: E1205 15:20:59.584315 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76d4e57af5dcf7f638c14aff493e0d9283251afdcd9d65a71cd9bb37c6ad2b63\": container with ID starting with 76d4e57af5dcf7f638c14aff493e0d9283251afdcd9d65a71cd9bb37c6ad2b63 not found: ID does not exist" containerID="76d4e57af5dcf7f638c14aff493e0d9283251afdcd9d65a71cd9bb37c6ad2b63" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.584341 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76d4e57af5dcf7f638c14aff493e0d9283251afdcd9d65a71cd9bb37c6ad2b63"} err="failed to get container status \"76d4e57af5dcf7f638c14aff493e0d9283251afdcd9d65a71cd9bb37c6ad2b63\": rpc error: code = NotFound desc = could not find container \"76d4e57af5dcf7f638c14aff493e0d9283251afdcd9d65a71cd9bb37c6ad2b63\": container with ID starting with 76d4e57af5dcf7f638c14aff493e0d9283251afdcd9d65a71cd9bb37c6ad2b63 not found: ID does not exist" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.596336 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d48a78c-f903-4b87-8c99-622c71bac6aa-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7d48a78c-f903-4b87-8c99-622c71bac6aa\") " pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 
15:20:59.596684 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d48a78c-f903-4b87-8c99-622c71bac6aa-log-httpd\") pod \"ceilometer-0\" (UID: \"7d48a78c-f903-4b87-8c99-622c71bac6aa\") " pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.596733 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d48a78c-f903-4b87-8c99-622c71bac6aa-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7d48a78c-f903-4b87-8c99-622c71bac6aa\") " pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.596754 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5jzc\" (UniqueName: \"kubernetes.io/projected/7d48a78c-f903-4b87-8c99-622c71bac6aa-kube-api-access-d5jzc\") pod \"ceilometer-0\" (UID: \"7d48a78c-f903-4b87-8c99-622c71bac6aa\") " pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.596781 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d48a78c-f903-4b87-8c99-622c71bac6aa-config-data\") pod \"ceilometer-0\" (UID: \"7d48a78c-f903-4b87-8c99-622c71bac6aa\") " pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.596801 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d48a78c-f903-4b87-8c99-622c71bac6aa-scripts\") pod \"ceilometer-0\" (UID: \"7d48a78c-f903-4b87-8c99-622c71bac6aa\") " pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.596838 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d48a78c-f903-4b87-8c99-622c71bac6aa-run-httpd\") pod \"ceilometer-0\" (UID: \"7d48a78c-f903-4b87-8c99-622c71bac6aa\") " pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.596979 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7d48a78c-f903-4b87-8c99-622c71bac6aa-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7d48a78c-f903-4b87-8c99-622c71bac6aa\") " pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.599099 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d48a78c-f903-4b87-8c99-622c71bac6aa-log-httpd\") pod \"ceilometer-0\" (UID: \"7d48a78c-f903-4b87-8c99-622c71bac6aa\") " pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.599408 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d48a78c-f903-4b87-8c99-622c71bac6aa-run-httpd\") pod \"ceilometer-0\" (UID: \"7d48a78c-f903-4b87-8c99-622c71bac6aa\") " pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.602951 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7d48a78c-f903-4b87-8c99-622c71bac6aa-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7d48a78c-f903-4b87-8c99-622c71bac6aa\") " pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.608561 4840 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d48a78c-f903-4b87-8c99-622c71bac6aa-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7d48a78c-f903-4b87-8c99-622c71bac6aa\") " pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.610434 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d48a78c-f903-4b87-8c99-622c71bac6aa-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7d48a78c-f903-4b87-8c99-622c71bac6aa\") " pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.612041 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d48a78c-f903-4b87-8c99-622c71bac6aa-scripts\") pod \"ceilometer-0\" (UID: \"7d48a78c-f903-4b87-8c99-622c71bac6aa\") " pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.612532 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d48a78c-f903-4b87-8c99-622c71bac6aa-config-data\") pod \"ceilometer-0\" (UID: \"7d48a78c-f903-4b87-8c99-622c71bac6aa\") " pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.618320 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5jzc\" (UniqueName: \"kubernetes.io/projected/7d48a78c-f903-4b87-8c99-622c71bac6aa-kube-api-access-d5jzc\") pod \"ceilometer-0\" (UID: \"7d48a78c-f903-4b87-8c99-622c71bac6aa\") " pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.745648 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.827472 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.901471 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcfed2c6-10b9-4745-b2c3-b461eab145bb-combined-ca-bundle\") pod \"fcfed2c6-10b9-4745-b2c3-b461eab145bb\" (UID: \"fcfed2c6-10b9-4745-b2c3-b461eab145bb\") " Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.902173 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcfed2c6-10b9-4745-b2c3-b461eab145bb-config-data\") pod \"fcfed2c6-10b9-4745-b2c3-b461eab145bb\" (UID: \"fcfed2c6-10b9-4745-b2c3-b461eab145bb\") " Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.903045 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l64kx\" (UniqueName: \"kubernetes.io/projected/fcfed2c6-10b9-4745-b2c3-b461eab145bb-kube-api-access-l64kx\") pod \"fcfed2c6-10b9-4745-b2c3-b461eab145bb\" (UID: \"fcfed2c6-10b9-4745-b2c3-b461eab145bb\") " Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.903119 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcfed2c6-10b9-4745-b2c3-b461eab145bb-logs\") pod \"fcfed2c6-10b9-4745-b2c3-b461eab145bb\" (UID: \"fcfed2c6-10b9-4745-b2c3-b461eab145bb\") " Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.904621 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcfed2c6-10b9-4745-b2c3-b461eab145bb-logs" (OuterVolumeSpecName: "logs") pod "fcfed2c6-10b9-4745-b2c3-b461eab145bb" (UID: "fcfed2c6-10b9-4745-b2c3-b461eab145bb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.909183 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcfed2c6-10b9-4745-b2c3-b461eab145bb-kube-api-access-l64kx" (OuterVolumeSpecName: "kube-api-access-l64kx") pod "fcfed2c6-10b9-4745-b2c3-b461eab145bb" (UID: "fcfed2c6-10b9-4745-b2c3-b461eab145bb"). InnerVolumeSpecName "kube-api-access-l64kx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.945946 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcfed2c6-10b9-4745-b2c3-b461eab145bb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fcfed2c6-10b9-4745-b2c3-b461eab145bb" (UID: "fcfed2c6-10b9-4745-b2c3-b461eab145bb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:20:59 crc kubenswrapper[4840]: I1205 15:20:59.959368 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcfed2c6-10b9-4745-b2c3-b461eab145bb-config-data" (OuterVolumeSpecName: "config-data") pod "fcfed2c6-10b9-4745-b2c3-b461eab145bb" (UID: "fcfed2c6-10b9-4745-b2c3-b461eab145bb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.011323 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcfed2c6-10b9-4745-b2c3-b461eab145bb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.019228 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcfed2c6-10b9-4745-b2c3-b461eab145bb-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.019271 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l64kx\" (UniqueName: \"kubernetes.io/projected/fcfed2c6-10b9-4745-b2c3-b461eab145bb-kube-api-access-l64kx\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.019291 4840 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcfed2c6-10b9-4745-b2c3-b461eab145bb-logs\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.085732 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b57de619-6aaf-4eec-aae7-ef5406b956e2" path="/var/lib/kubelet/pods/b57de619-6aaf-4eec-aae7-ef5406b956e2/volumes" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.338772 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.345970 4840 generic.go:334] "Generic (PLEG): container finished" podID="fcfed2c6-10b9-4745-b2c3-b461eab145bb" containerID="5ca616b17146e61f90b43d9b96f90f78ea5ec8c548089e014cee78c20a5810e0" exitCode=0 Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.346032 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fcfed2c6-10b9-4745-b2c3-b461eab145bb","Type":"ContainerDied","Data":"5ca616b17146e61f90b43d9b96f90f78ea5ec8c548089e014cee78c20a5810e0"} Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.346040 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.346071 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fcfed2c6-10b9-4745-b2c3-b461eab145bb","Type":"ContainerDied","Data":"49f9c853b360eb9cce93959b1ce7dfae531a7f93e58cd2ae3f7d25cb9de51000"} Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.346089 4840 scope.go:117] "RemoveContainer" containerID="5ca616b17146e61f90b43d9b96f90f78ea5ec8c548089e014cee78c20a5810e0" Dec 05 15:21:00 crc kubenswrapper[4840]: W1205 15:21:00.355761 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d48a78c_f903_4b87_8c99_622c71bac6aa.slice/crio-7505ad40e48c1fdebf747534f67e01caeb7a8d3779926548e771192b2d76c879 WatchSource:0}: Error finding container 7505ad40e48c1fdebf747534f67e01caeb7a8d3779926548e771192b2d76c879: Status 404 returned error can't find the container with id 7505ad40e48c1fdebf747534f67e01caeb7a8d3779926548e771192b2d76c879 Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.435777 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.455393 4840 scope.go:117] "RemoveContainer" containerID="11c70fe994411d828878bb665b2110adcd11f5ace3384cbfb4dbf438407c1e56" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.458326 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.475399 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 15:21:00 crc kubenswrapper[4840]: E1205 15:21:00.476196 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcfed2c6-10b9-4745-b2c3-b461eab145bb" containerName="nova-api-log" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.476216 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcfed2c6-10b9-4745-b2c3-b461eab145bb" containerName="nova-api-log" Dec 05 15:21:00 crc kubenswrapper[4840]: E1205 15:21:00.476238 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcfed2c6-10b9-4745-b2c3-b461eab145bb" containerName="nova-api-api" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.476247 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcfed2c6-10b9-4745-b2c3-b461eab145bb" containerName="nova-api-api" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.476457 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcfed2c6-10b9-4745-b2c3-b461eab145bb" containerName="nova-api-log" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.476484 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcfed2c6-10b9-4745-b2c3-b461eab145bb" containerName="nova-api-api" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.477684 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.478799 4840 scope.go:117] "RemoveContainer" containerID="5ca616b17146e61f90b43d9b96f90f78ea5ec8c548089e014cee78c20a5810e0" Dec 05 15:21:00 crc kubenswrapper[4840]: E1205 15:21:00.479192 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ca616b17146e61f90b43d9b96f90f78ea5ec8c548089e014cee78c20a5810e0\": container with ID starting with 5ca616b17146e61f90b43d9b96f90f78ea5ec8c548089e014cee78c20a5810e0 not found: ID does not exist" containerID="5ca616b17146e61f90b43d9b96f90f78ea5ec8c548089e014cee78c20a5810e0" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.479219 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ca616b17146e61f90b43d9b96f90f78ea5ec8c548089e014cee78c20a5810e0"} err="failed to get container status \"5ca616b17146e61f90b43d9b96f90f78ea5ec8c548089e014cee78c20a5810e0\": rpc error: code = NotFound desc = could not find container \"5ca616b17146e61f90b43d9b96f90f78ea5ec8c548089e014cee78c20a5810e0\": container with ID starting with 5ca616b17146e61f90b43d9b96f90f78ea5ec8c548089e014cee78c20a5810e0 not found: ID does not exist" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.479241 4840 scope.go:117] "RemoveContainer" containerID="11c70fe994411d828878bb665b2110adcd11f5ace3384cbfb4dbf438407c1e56" Dec 05 15:21:00 crc kubenswrapper[4840]: E1205 15:21:00.479648 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11c70fe994411d828878bb665b2110adcd11f5ace3384cbfb4dbf438407c1e56\": container with ID starting with 11c70fe994411d828878bb665b2110adcd11f5ace3384cbfb4dbf438407c1e56 not found: ID does not exist" containerID="11c70fe994411d828878bb665b2110adcd11f5ace3384cbfb4dbf438407c1e56" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.479744 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11c70fe994411d828878bb665b2110adcd11f5ace3384cbfb4dbf438407c1e56"} err="failed to get container status \"11c70fe994411d828878bb665b2110adcd11f5ace3384cbfb4dbf438407c1e56\": rpc error: code = NotFound desc = could not find container \"11c70fe994411d828878bb665b2110adcd11f5ace3384cbfb4dbf438407c1e56\": container with ID starting with 11c70fe994411d828878bb665b2110adcd11f5ace3384cbfb4dbf438407c1e56 not found: ID does not exist" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.479985 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.481199 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.481357 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.487147 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.528730 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b1fdcf9-4e2d-4681-869f-472f61c8da40-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\") " pod="openstack/nova-api-0" Dec 05 
15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.528808 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b1fdcf9-4e2d-4681-869f-472f61c8da40-public-tls-certs\") pod \"nova-api-0\" (UID: \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\") " pod="openstack/nova-api-0" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.528833 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdl9d\" (UniqueName: \"kubernetes.io/projected/4b1fdcf9-4e2d-4681-869f-472f61c8da40-kube-api-access-mdl9d\") pod \"nova-api-0\" (UID: \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\") " pod="openstack/nova-api-0" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.528890 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b1fdcf9-4e2d-4681-869f-472f61c8da40-config-data\") pod \"nova-api-0\" (UID: \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\") " pod="openstack/nova-api-0" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.528956 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b1fdcf9-4e2d-4681-869f-472f61c8da40-logs\") pod \"nova-api-0\" (UID: \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\") " pod="openstack/nova-api-0" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.528979 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b1fdcf9-4e2d-4681-869f-472f61c8da40-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\") " pod="openstack/nova-api-0" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.630809 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b1fdcf9-4e2d-4681-869f-472f61c8da40-logs\") pod \"nova-api-0\" (UID: \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\") " pod="openstack/nova-api-0" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.630880 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b1fdcf9-4e2d-4681-869f-472f61c8da40-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\") " pod="openstack/nova-api-0" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.630982 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b1fdcf9-4e2d-4681-869f-472f61c8da40-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\") " pod="openstack/nova-api-0" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.631044 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b1fdcf9-4e2d-4681-869f-472f61c8da40-public-tls-certs\") pod \"nova-api-0\" (UID: \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\") " pod="openstack/nova-api-0" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.631069 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdl9d\" (UniqueName: \"kubernetes.io/projected/4b1fdcf9-4e2d-4681-869f-472f61c8da40-kube-api-access-mdl9d\") pod \"nova-api-0\" (UID: 
\"4b1fdcf9-4e2d-4681-869f-472f61c8da40\") " pod="openstack/nova-api-0" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.631099 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b1fdcf9-4e2d-4681-869f-472f61c8da40-config-data\") pod \"nova-api-0\" (UID: \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\") " pod="openstack/nova-api-0" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.632285 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b1fdcf9-4e2d-4681-869f-472f61c8da40-logs\") pod \"nova-api-0\" (UID: \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\") " pod="openstack/nova-api-0" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.636926 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b1fdcf9-4e2d-4681-869f-472f61c8da40-public-tls-certs\") pod \"nova-api-0\" (UID: \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\") " pod="openstack/nova-api-0" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.637196 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b1fdcf9-4e2d-4681-869f-472f61c8da40-config-data\") pod \"nova-api-0\" (UID: \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\") " pod="openstack/nova-api-0" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.637234 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b1fdcf9-4e2d-4681-869f-472f61c8da40-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\") " pod="openstack/nova-api-0" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.639452 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b1fdcf9-4e2d-4681-869f-472f61c8da40-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\") " pod="openstack/nova-api-0" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.647659 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdl9d\" (UniqueName: \"kubernetes.io/projected/4b1fdcf9-4e2d-4681-869f-472f61c8da40-kube-api-access-mdl9d\") pod \"nova-api-0\" (UID: \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\") " pod="openstack/nova-api-0" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.800900 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.851665 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:21:00 crc kubenswrapper[4840]: I1205 15:21:00.875218 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:21:01 crc kubenswrapper[4840]: I1205 15:21:01.101478 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 15:21:01 crc kubenswrapper[4840]: I1205 15:21:01.101914 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 15:21:01 crc kubenswrapper[4840]: I1205 15:21:01.280920 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 15:21:01 crc kubenswrapper[4840]: W1205 15:21:01.298577 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4b1fdcf9_4e2d_4681_869f_472f61c8da40.slice/crio-1bac47a7039a2df7714dd895ce344c007f07451c1b0975278fa3aa6eba595233 WatchSource:0}: Error finding container 1bac47a7039a2df7714dd895ce344c007f07451c1b0975278fa3aa6eba595233: Status 404 returned error can't find the container with id 1bac47a7039a2df7714dd895ce344c007f07451c1b0975278fa3aa6eba595233 Dec 05 15:21:01 crc kubenswrapper[4840]: I1205 15:21:01.358676 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4b1fdcf9-4e2d-4681-869f-472f61c8da40","Type":"ContainerStarted","Data":"1bac47a7039a2df7714dd895ce344c007f07451c1b0975278fa3aa6eba595233"} Dec 05 15:21:01 crc kubenswrapper[4840]: I1205 15:21:01.362636 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d48a78c-f903-4b87-8c99-622c71bac6aa","Type":"ContainerStarted","Data":"9f15377cd894e6ab7c9545546b71a83a9255ee316fa3fdf3cece4ceb5c899818"} Dec 05 15:21:01 crc kubenswrapper[4840]: I1205 15:21:01.362689 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d48a78c-f903-4b87-8c99-622c71bac6aa","Type":"ContainerStarted","Data":"7505ad40e48c1fdebf747534f67e01caeb7a8d3779926548e771192b2d76c879"} Dec 05 15:21:01 crc kubenswrapper[4840]: I1205 15:21:01.415097 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 05 15:21:01 crc kubenswrapper[4840]: I1205 15:21:01.550594 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-49rjk"] Dec 05 15:21:01 crc kubenswrapper[4840]: I1205 15:21:01.552436 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-49rjk" Dec 05 15:21:01 crc kubenswrapper[4840]: I1205 15:21:01.558527 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 05 15:21:01 crc kubenswrapper[4840]: I1205 15:21:01.558917 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 05 15:21:01 crc kubenswrapper[4840]: I1205 15:21:01.591222 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-49rjk"] Dec 05 15:21:01 crc kubenswrapper[4840]: I1205 15:21:01.659053 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85mv4\" (UniqueName: \"kubernetes.io/projected/fe6183d0-57eb-47e8-8fe3-15cacaedfde3-kube-api-access-85mv4\") pod \"nova-cell1-cell-mapping-49rjk\" (UID: \"fe6183d0-57eb-47e8-8fe3-15cacaedfde3\") " pod="openstack/nova-cell1-cell-mapping-49rjk" Dec 05 15:21:01 crc kubenswrapper[4840]: I1205 15:21:01.659135 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe6183d0-57eb-47e8-8fe3-15cacaedfde3-config-data\") pod \"nova-cell1-cell-mapping-49rjk\" (UID: \"fe6183d0-57eb-47e8-8fe3-15cacaedfde3\") " pod="openstack/nova-cell1-cell-mapping-49rjk" Dec 05 15:21:01 crc kubenswrapper[4840]: I1205 15:21:01.659523 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe6183d0-57eb-47e8-8fe3-15cacaedfde3-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-49rjk\" (UID: \"fe6183d0-57eb-47e8-8fe3-15cacaedfde3\") " pod="openstack/nova-cell1-cell-mapping-49rjk" Dec 05 15:21:01 crc kubenswrapper[4840]: I1205 15:21:01.659667 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe6183d0-57eb-47e8-8fe3-15cacaedfde3-scripts\") pod \"nova-cell1-cell-mapping-49rjk\" (UID: \"fe6183d0-57eb-47e8-8fe3-15cacaedfde3\") " pod="openstack/nova-cell1-cell-mapping-49rjk" Dec 05 15:21:01 crc kubenswrapper[4840]: I1205 15:21:01.760949 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85mv4\" (UniqueName: \"kubernetes.io/projected/fe6183d0-57eb-47e8-8fe3-15cacaedfde3-kube-api-access-85mv4\") pod \"nova-cell1-cell-mapping-49rjk\" (UID: \"fe6183d0-57eb-47e8-8fe3-15cacaedfde3\") " pod="openstack/nova-cell1-cell-mapping-49rjk" Dec 05 15:21:01 crc kubenswrapper[4840]: I1205 15:21:01.761666 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe6183d0-57eb-47e8-8fe3-15cacaedfde3-config-data\") pod \"nova-cell1-cell-mapping-49rjk\" (UID: \"fe6183d0-57eb-47e8-8fe3-15cacaedfde3\") " pod="openstack/nova-cell1-cell-mapping-49rjk" Dec 05 15:21:01 crc kubenswrapper[4840]: I1205 15:21:01.762481 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe6183d0-57eb-47e8-8fe3-15cacaedfde3-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-49rjk\" (UID: \"fe6183d0-57eb-47e8-8fe3-15cacaedfde3\") " pod="openstack/nova-cell1-cell-mapping-49rjk" Dec 05 15:21:01 crc kubenswrapper[4840]: I1205 15:21:01.762612 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/fe6183d0-57eb-47e8-8fe3-15cacaedfde3-scripts\") pod \"nova-cell1-cell-mapping-49rjk\" (UID: \"fe6183d0-57eb-47e8-8fe3-15cacaedfde3\") " pod="openstack/nova-cell1-cell-mapping-49rjk" Dec 05 15:21:01 crc kubenswrapper[4840]: I1205 15:21:01.765516 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe6183d0-57eb-47e8-8fe3-15cacaedfde3-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-49rjk\" (UID: \"fe6183d0-57eb-47e8-8fe3-15cacaedfde3\") " pod="openstack/nova-cell1-cell-mapping-49rjk" Dec 05 15:21:01 crc kubenswrapper[4840]: I1205 15:21:01.765832 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe6183d0-57eb-47e8-8fe3-15cacaedfde3-config-data\") pod \"nova-cell1-cell-mapping-49rjk\" (UID: \"fe6183d0-57eb-47e8-8fe3-15cacaedfde3\") " pod="openstack/nova-cell1-cell-mapping-49rjk" Dec 05 15:21:01 crc kubenswrapper[4840]: I1205 15:21:01.766189 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe6183d0-57eb-47e8-8fe3-15cacaedfde3-scripts\") pod \"nova-cell1-cell-mapping-49rjk\" (UID: \"fe6183d0-57eb-47e8-8fe3-15cacaedfde3\") " pod="openstack/nova-cell1-cell-mapping-49rjk" Dec 05 15:21:01 crc kubenswrapper[4840]: I1205 15:21:01.778511 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85mv4\" (UniqueName: \"kubernetes.io/projected/fe6183d0-57eb-47e8-8fe3-15cacaedfde3-kube-api-access-85mv4\") pod \"nova-cell1-cell-mapping-49rjk\" (UID: \"fe6183d0-57eb-47e8-8fe3-15cacaedfde3\") " pod="openstack/nova-cell1-cell-mapping-49rjk" Dec 05 15:21:02 crc kubenswrapper[4840]: I1205 15:21:02.057741 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-49rjk" Dec 05 15:21:02 crc kubenswrapper[4840]: I1205 15:21:02.104299 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcfed2c6-10b9-4745-b2c3-b461eab145bb" path="/var/lib/kubelet/pods/fcfed2c6-10b9-4745-b2c3-b461eab145bb/volumes" Dec 05 15:21:02 crc kubenswrapper[4840]: I1205 15:21:02.122075 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="1c301a0e-a0bd-4ab4-b1e1-8d105434d534" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.199:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 15:21:02 crc kubenswrapper[4840]: I1205 15:21:02.122445 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="1c301a0e-a0bd-4ab4-b1e1-8d105434d534" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.199:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 15:21:02 crc kubenswrapper[4840]: I1205 15:21:02.378100 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4b1fdcf9-4e2d-4681-869f-472f61c8da40","Type":"ContainerStarted","Data":"fbaedcc5e387cc2218c24e854911786f6e43c689cf2a262171059cbe8c55a643"} Dec 05 15:21:02 crc kubenswrapper[4840]: I1205 15:21:02.378348 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4b1fdcf9-4e2d-4681-869f-472f61c8da40","Type":"ContainerStarted","Data":"0e5590d07911a213cda8306fdfb7c09e4039d458d4a85f0349cc6f592e029f9d"} Dec 05 15:21:02 crc kubenswrapper[4840]: I1205 15:21:02.406438 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.406412111 podStartE2EDuration="2.406412111s" podCreationTimestamp="2025-12-05 15:21:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:21:02.400757421 +0000 UTC m=+1340.741820025" watchObservedRunningTime="2025-12-05 15:21:02.406412111 +0000 UTC m=+1340.747474735" Dec 05 15:21:02 crc kubenswrapper[4840]: I1205 15:21:02.616347 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-49rjk"] Dec 05 15:21:02 crc kubenswrapper[4840]: W1205 15:21:02.620696 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfe6183d0_57eb_47e8_8fe3_15cacaedfde3.slice/crio-e0d77b15d6b276e9e18f312ccd39ebe3df9f070eb36a7c28cc76ea35e57c4a61 WatchSource:0}: Error finding container e0d77b15d6b276e9e18f312ccd39ebe3df9f070eb36a7c28cc76ea35e57c4a61: Status 404 returned error can't find the container with id e0d77b15d6b276e9e18f312ccd39ebe3df9f070eb36a7c28cc76ea35e57c4a61 Dec 05 15:21:03 crc kubenswrapper[4840]: I1205 15:21:03.388836 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-49rjk" event={"ID":"fe6183d0-57eb-47e8-8fe3-15cacaedfde3","Type":"ContainerStarted","Data":"ef726b6aea293f80b3eda56e0a7d32b0a78fa9d15a000bc82bc76f2e62bc2bc5"} Dec 05 15:21:03 crc kubenswrapper[4840]: I1205 15:21:03.389428 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-49rjk" 
event={"ID":"fe6183d0-57eb-47e8-8fe3-15cacaedfde3","Type":"ContainerStarted","Data":"e0d77b15d6b276e9e18f312ccd39ebe3df9f070eb36a7c28cc76ea35e57c4a61"} Dec 05 15:21:03 crc kubenswrapper[4840]: I1205 15:21:03.409793 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d48a78c-f903-4b87-8c99-622c71bac6aa","Type":"ContainerStarted","Data":"f92eebaca28aa5ec60a9d8b51bca77f4e115a118bf6ffb7be29235cf784a2bb1"} Dec 05 15:21:03 crc kubenswrapper[4840]: I1205 15:21:03.419050 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-49rjk" podStartSLOduration=2.419030136 podStartE2EDuration="2.419030136s" podCreationTimestamp="2025-12-05 15:21:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:21:03.412838472 +0000 UTC m=+1341.753901086" watchObservedRunningTime="2025-12-05 15:21:03.419030136 +0000 UTC m=+1341.760092750" Dec 05 15:21:03 crc kubenswrapper[4840]: I1205 15:21:03.757341 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" Dec 05 15:21:03 crc kubenswrapper[4840]: I1205 15:21:03.820522 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-cmbv2"] Dec 05 15:21:03 crc kubenswrapper[4840]: I1205 15:21:03.820843 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" podUID="91d8d92c-10f0-453f-bb63-01bfaa5258c0" containerName="dnsmasq-dns" containerID="cri-o://7208ba63c580147aa108f074e1a600ebde37182acafe66574e0fa42950377a15" gracePeriod=10 Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.405181 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.416707 4840 generic.go:334] "Generic (PLEG): container finished" podID="91d8d92c-10f0-453f-bb63-01bfaa5258c0" containerID="7208ba63c580147aa108f074e1a600ebde37182acafe66574e0fa42950377a15" exitCode=0 Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.416767 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.416777 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" event={"ID":"91d8d92c-10f0-453f-bb63-01bfaa5258c0","Type":"ContainerDied","Data":"7208ba63c580147aa108f074e1a600ebde37182acafe66574e0fa42950377a15"} Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.416805 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-cmbv2" event={"ID":"91d8d92c-10f0-453f-bb63-01bfaa5258c0","Type":"ContainerDied","Data":"49b928f6e349c97d05a1257a7e17065d7d58b21ebfceb8181558a7ce601fa4fb"} Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.416822 4840 scope.go:117] "RemoveContainer" containerID="7208ba63c580147aa108f074e1a600ebde37182acafe66574e0fa42950377a15" Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.420911 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d48a78c-f903-4b87-8c99-622c71bac6aa","Type":"ContainerStarted","Data":"c706ab922c8e13c30a3a36225203e2b0644a7d3c0f80aa761520d4c0cc2d0582"} Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.439613 4840 scope.go:117] "RemoveContainer" containerID="4c6c50637b763dd0ff57737c7280c9fd1f36c4c1dffcde6c317d8efdb3e19700" Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.483387 4840 scope.go:117] "RemoveContainer" containerID="7208ba63c580147aa108f074e1a600ebde37182acafe66574e0fa42950377a15" Dec 05 15:21:04 crc kubenswrapper[4840]: E1205 15:21:04.483744 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7208ba63c580147aa108f074e1a600ebde37182acafe66574e0fa42950377a15\": container with ID starting with 7208ba63c580147aa108f074e1a600ebde37182acafe66574e0fa42950377a15 not found: ID does not exist" containerID="7208ba63c580147aa108f074e1a600ebde37182acafe66574e0fa42950377a15" Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.483901 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7208ba63c580147aa108f074e1a600ebde37182acafe66574e0fa42950377a15"} err="failed to get container status \"7208ba63c580147aa108f074e1a600ebde37182acafe66574e0fa42950377a15\": rpc error: code = NotFound desc = could not find container \"7208ba63c580147aa108f074e1a600ebde37182acafe66574e0fa42950377a15\": container with ID starting with 7208ba63c580147aa108f074e1a600ebde37182acafe66574e0fa42950377a15 not found: ID does not exist" Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.484069 4840 scope.go:117] "RemoveContainer" containerID="4c6c50637b763dd0ff57737c7280c9fd1f36c4c1dffcde6c317d8efdb3e19700" Dec 05 15:21:04 crc kubenswrapper[4840]: E1205 15:21:04.484588 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c6c50637b763dd0ff57737c7280c9fd1f36c4c1dffcde6c317d8efdb3e19700\": container with ID starting with 4c6c50637b763dd0ff57737c7280c9fd1f36c4c1dffcde6c317d8efdb3e19700 not found: ID does not exist" containerID="4c6c50637b763dd0ff57737c7280c9fd1f36c4c1dffcde6c317d8efdb3e19700" Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.484687 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c6c50637b763dd0ff57737c7280c9fd1f36c4c1dffcde6c317d8efdb3e19700"} err="failed to get container status 
\"4c6c50637b763dd0ff57737c7280c9fd1f36c4c1dffcde6c317d8efdb3e19700\": rpc error: code = NotFound desc = could not find container \"4c6c50637b763dd0ff57737c7280c9fd1f36c4c1dffcde6c317d8efdb3e19700\": container with ID starting with 4c6c50637b763dd0ff57737c7280c9fd1f36c4c1dffcde6c317d8efdb3e19700 not found: ID does not exist" Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.526857 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-ovsdbserver-nb\") pod \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\" (UID: \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\") " Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.527150 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-ovsdbserver-sb\") pod \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\" (UID: \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\") " Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.527318 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-dns-swift-storage-0\") pod \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\" (UID: \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\") " Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.527498 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vjfcn\" (UniqueName: \"kubernetes.io/projected/91d8d92c-10f0-453f-bb63-01bfaa5258c0-kube-api-access-vjfcn\") pod \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\" (UID: \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\") " Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.527610 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-config\") pod \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\" (UID: \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\") " Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.527893 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-dns-svc\") pod \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\" (UID: \"91d8d92c-10f0-453f-bb63-01bfaa5258c0\") " Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.534389 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91d8d92c-10f0-453f-bb63-01bfaa5258c0-kube-api-access-vjfcn" (OuterVolumeSpecName: "kube-api-access-vjfcn") pod "91d8d92c-10f0-453f-bb63-01bfaa5258c0" (UID: "91d8d92c-10f0-453f-bb63-01bfaa5258c0"). InnerVolumeSpecName "kube-api-access-vjfcn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.607655 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-config" (OuterVolumeSpecName: "config") pod "91d8d92c-10f0-453f-bb63-01bfaa5258c0" (UID: "91d8d92c-10f0-453f-bb63-01bfaa5258c0"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.610508 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "91d8d92c-10f0-453f-bb63-01bfaa5258c0" (UID: "91d8d92c-10f0-453f-bb63-01bfaa5258c0"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.616907 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "91d8d92c-10f0-453f-bb63-01bfaa5258c0" (UID: "91d8d92c-10f0-453f-bb63-01bfaa5258c0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.619002 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "91d8d92c-10f0-453f-bb63-01bfaa5258c0" (UID: "91d8d92c-10f0-453f-bb63-01bfaa5258c0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.622565 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "91d8d92c-10f0-453f-bb63-01bfaa5258c0" (UID: "91d8d92c-10f0-453f-bb63-01bfaa5258c0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.630536 4840 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.630561 4840 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.630570 4840 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.630580 4840 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.630589 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vjfcn\" (UniqueName: \"kubernetes.io/projected/91d8d92c-10f0-453f-bb63-01bfaa5258c0-kube-api-access-vjfcn\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.630598 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91d8d92c-10f0-453f-bb63-01bfaa5258c0-config\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.775624 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-865f5d856f-cmbv2"] Dec 05 15:21:04 crc kubenswrapper[4840]: I1205 15:21:04.789453 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-cmbv2"] Dec 05 15:21:05 crc kubenswrapper[4840]: I1205 15:21:05.434061 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d48a78c-f903-4b87-8c99-622c71bac6aa","Type":"ContainerStarted","Data":"e7cbb528540f6bed522c3a7c6f1886ef706176ad714ded1c0bb6b7e68a7ff42d"} Dec 05 15:21:05 crc kubenswrapper[4840]: I1205 15:21:05.434720 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 15:21:05 crc kubenswrapper[4840]: I1205 15:21:05.501264 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.298533742 podStartE2EDuration="6.501237982s" podCreationTimestamp="2025-12-05 15:20:59 +0000 UTC" firstStartedPulling="2025-12-05 15:21:00.360103815 +0000 UTC m=+1338.701166429" lastFinishedPulling="2025-12-05 15:21:04.562808055 +0000 UTC m=+1342.903870669" observedRunningTime="2025-12-05 15:21:05.479433409 +0000 UTC m=+1343.820496053" watchObservedRunningTime="2025-12-05 15:21:05.501237982 +0000 UTC m=+1343.842300596" Dec 05 15:21:06 crc kubenswrapper[4840]: I1205 15:21:06.081089 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91d8d92c-10f0-453f-bb63-01bfaa5258c0" path="/var/lib/kubelet/pods/91d8d92c-10f0-453f-bb63-01bfaa5258c0/volumes" Dec 05 15:21:08 crc kubenswrapper[4840]: I1205 15:21:08.477726 4840 generic.go:334] "Generic (PLEG): container finished" podID="fe6183d0-57eb-47e8-8fe3-15cacaedfde3" containerID="ef726b6aea293f80b3eda56e0a7d32b0a78fa9d15a000bc82bc76f2e62bc2bc5" exitCode=0 Dec 05 15:21:08 crc kubenswrapper[4840]: I1205 15:21:08.478065 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-49rjk" event={"ID":"fe6183d0-57eb-47e8-8fe3-15cacaedfde3","Type":"ContainerDied","Data":"ef726b6aea293f80b3eda56e0a7d32b0a78fa9d15a000bc82bc76f2e62bc2bc5"} Dec 05 15:21:09 crc kubenswrapper[4840]: I1205 15:21:09.882737 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-49rjk" Dec 05 15:21:09 crc kubenswrapper[4840]: I1205 15:21:09.935309 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe6183d0-57eb-47e8-8fe3-15cacaedfde3-config-data\") pod \"fe6183d0-57eb-47e8-8fe3-15cacaedfde3\" (UID: \"fe6183d0-57eb-47e8-8fe3-15cacaedfde3\") " Dec 05 15:21:09 crc kubenswrapper[4840]: I1205 15:21:09.935366 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe6183d0-57eb-47e8-8fe3-15cacaedfde3-scripts\") pod \"fe6183d0-57eb-47e8-8fe3-15cacaedfde3\" (UID: \"fe6183d0-57eb-47e8-8fe3-15cacaedfde3\") " Dec 05 15:21:09 crc kubenswrapper[4840]: I1205 15:21:09.935470 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85mv4\" (UniqueName: \"kubernetes.io/projected/fe6183d0-57eb-47e8-8fe3-15cacaedfde3-kube-api-access-85mv4\") pod \"fe6183d0-57eb-47e8-8fe3-15cacaedfde3\" (UID: \"fe6183d0-57eb-47e8-8fe3-15cacaedfde3\") " Dec 05 15:21:09 crc kubenswrapper[4840]: I1205 15:21:09.935505 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe6183d0-57eb-47e8-8fe3-15cacaedfde3-combined-ca-bundle\") pod \"fe6183d0-57eb-47e8-8fe3-15cacaedfde3\" (UID: \"fe6183d0-57eb-47e8-8fe3-15cacaedfde3\") " Dec 05 15:21:09 crc kubenswrapper[4840]: I1205 15:21:09.941440 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe6183d0-57eb-47e8-8fe3-15cacaedfde3-scripts" (OuterVolumeSpecName: "scripts") pod "fe6183d0-57eb-47e8-8fe3-15cacaedfde3" (UID: "fe6183d0-57eb-47e8-8fe3-15cacaedfde3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:21:09 crc kubenswrapper[4840]: I1205 15:21:09.941553 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe6183d0-57eb-47e8-8fe3-15cacaedfde3-kube-api-access-85mv4" (OuterVolumeSpecName: "kube-api-access-85mv4") pod "fe6183d0-57eb-47e8-8fe3-15cacaedfde3" (UID: "fe6183d0-57eb-47e8-8fe3-15cacaedfde3"). InnerVolumeSpecName "kube-api-access-85mv4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:21:09 crc kubenswrapper[4840]: I1205 15:21:09.971679 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe6183d0-57eb-47e8-8fe3-15cacaedfde3-config-data" (OuterVolumeSpecName: "config-data") pod "fe6183d0-57eb-47e8-8fe3-15cacaedfde3" (UID: "fe6183d0-57eb-47e8-8fe3-15cacaedfde3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:21:09 crc kubenswrapper[4840]: I1205 15:21:09.973342 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe6183d0-57eb-47e8-8fe3-15cacaedfde3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fe6183d0-57eb-47e8-8fe3-15cacaedfde3" (UID: "fe6183d0-57eb-47e8-8fe3-15cacaedfde3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:21:10 crc kubenswrapper[4840]: I1205 15:21:10.040031 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe6183d0-57eb-47e8-8fe3-15cacaedfde3-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:10 crc kubenswrapper[4840]: I1205 15:21:10.040067 4840 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe6183d0-57eb-47e8-8fe3-15cacaedfde3-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:10 crc kubenswrapper[4840]: I1205 15:21:10.040077 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85mv4\" (UniqueName: \"kubernetes.io/projected/fe6183d0-57eb-47e8-8fe3-15cacaedfde3-kube-api-access-85mv4\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:10 crc kubenswrapper[4840]: I1205 15:21:10.040087 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe6183d0-57eb-47e8-8fe3-15cacaedfde3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:10 crc kubenswrapper[4840]: I1205 15:21:10.499237 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-49rjk" event={"ID":"fe6183d0-57eb-47e8-8fe3-15cacaedfde3","Type":"ContainerDied","Data":"e0d77b15d6b276e9e18f312ccd39ebe3df9f070eb36a7c28cc76ea35e57c4a61"} Dec 05 15:21:10 crc kubenswrapper[4840]: I1205 15:21:10.499543 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e0d77b15d6b276e9e18f312ccd39ebe3df9f070eb36a7c28cc76ea35e57c4a61" Dec 05 15:21:10 crc kubenswrapper[4840]: I1205 15:21:10.499338 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-49rjk" Dec 05 15:21:10 crc kubenswrapper[4840]: I1205 15:21:10.697163 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 15:21:10 crc kubenswrapper[4840]: I1205 15:21:10.697589 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="1d7097e8-da3f-444a-9e06-20960a21b38b" containerName="nova-scheduler-scheduler" containerID="cri-o://23be1e3b3ccd92013a0220e808bb85dc192b3ff3b1124f716015b1dd1115d87f" gracePeriod=30 Dec 05 15:21:10 crc kubenswrapper[4840]: I1205 15:21:10.712482 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 15:21:10 crc kubenswrapper[4840]: I1205 15:21:10.712860 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4b1fdcf9-4e2d-4681-869f-472f61c8da40" containerName="nova-api-log" containerID="cri-o://0e5590d07911a213cda8306fdfb7c09e4039d458d4a85f0349cc6f592e029f9d" gracePeriod=30 Dec 05 15:21:10 crc kubenswrapper[4840]: I1205 15:21:10.712955 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4b1fdcf9-4e2d-4681-869f-472f61c8da40" containerName="nova-api-api" containerID="cri-o://fbaedcc5e387cc2218c24e854911786f6e43c689cf2a262171059cbe8c55a643" gracePeriod=30 Dec 05 15:21:10 crc kubenswrapper[4840]: I1205 15:21:10.723655 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 15:21:10 crc kubenswrapper[4840]: I1205 15:21:10.723955 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="1c301a0e-a0bd-4ab4-b1e1-8d105434d534" 
containerName="nova-metadata-log" containerID="cri-o://c1c139fe6eda54ff1d01fd0ccf4c5c90e95428e38b6cf88549573326b04f079c" gracePeriod=30 Dec 05 15:21:10 crc kubenswrapper[4840]: I1205 15:21:10.724102 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="1c301a0e-a0bd-4ab4-b1e1-8d105434d534" containerName="nova-metadata-metadata" containerID="cri-o://ab75ea0e9cf26282e92727ba6687b5e2fc8c2193c4c417791b70f3d73fe49a38" gracePeriod=30 Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.318568 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.476409 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b1fdcf9-4e2d-4681-869f-472f61c8da40-logs\") pod \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\" (UID: \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\") " Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.476494 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b1fdcf9-4e2d-4681-869f-472f61c8da40-config-data\") pod \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\" (UID: \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\") " Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.476544 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b1fdcf9-4e2d-4681-869f-472f61c8da40-combined-ca-bundle\") pod \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\" (UID: \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\") " Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.476589 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b1fdcf9-4e2d-4681-869f-472f61c8da40-internal-tls-certs\") pod \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\" (UID: \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\") " Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.476618 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b1fdcf9-4e2d-4681-869f-472f61c8da40-public-tls-certs\") pod \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\" (UID: \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\") " Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.476669 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdl9d\" (UniqueName: \"kubernetes.io/projected/4b1fdcf9-4e2d-4681-869f-472f61c8da40-kube-api-access-mdl9d\") pod \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\" (UID: \"4b1fdcf9-4e2d-4681-869f-472f61c8da40\") " Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.476916 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b1fdcf9-4e2d-4681-869f-472f61c8da40-logs" (OuterVolumeSpecName: "logs") pod "4b1fdcf9-4e2d-4681-869f-472f61c8da40" (UID: "4b1fdcf9-4e2d-4681-869f-472f61c8da40"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.477912 4840 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b1fdcf9-4e2d-4681-869f-472f61c8da40-logs\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.483319 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b1fdcf9-4e2d-4681-869f-472f61c8da40-kube-api-access-mdl9d" (OuterVolumeSpecName: "kube-api-access-mdl9d") pod "4b1fdcf9-4e2d-4681-869f-472f61c8da40" (UID: "4b1fdcf9-4e2d-4681-869f-472f61c8da40"). InnerVolumeSpecName "kube-api-access-mdl9d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.506726 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b1fdcf9-4e2d-4681-869f-472f61c8da40-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4b1fdcf9-4e2d-4681-869f-472f61c8da40" (UID: "4b1fdcf9-4e2d-4681-869f-472f61c8da40"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.513805 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b1fdcf9-4e2d-4681-869f-472f61c8da40-config-data" (OuterVolumeSpecName: "config-data") pod "4b1fdcf9-4e2d-4681-869f-472f61c8da40" (UID: "4b1fdcf9-4e2d-4681-869f-472f61c8da40"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.516360 4840 generic.go:334] "Generic (PLEG): container finished" podID="1c301a0e-a0bd-4ab4-b1e1-8d105434d534" containerID="c1c139fe6eda54ff1d01fd0ccf4c5c90e95428e38b6cf88549573326b04f079c" exitCode=143 Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.516443 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1c301a0e-a0bd-4ab4-b1e1-8d105434d534","Type":"ContainerDied","Data":"c1c139fe6eda54ff1d01fd0ccf4c5c90e95428e38b6cf88549573326b04f079c"} Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.523326 4840 generic.go:334] "Generic (PLEG): container finished" podID="4b1fdcf9-4e2d-4681-869f-472f61c8da40" containerID="fbaedcc5e387cc2218c24e854911786f6e43c689cf2a262171059cbe8c55a643" exitCode=0 Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.523357 4840 generic.go:334] "Generic (PLEG): container finished" podID="4b1fdcf9-4e2d-4681-869f-472f61c8da40" containerID="0e5590d07911a213cda8306fdfb7c09e4039d458d4a85f0349cc6f592e029f9d" exitCode=143 Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.523367 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4b1fdcf9-4e2d-4681-869f-472f61c8da40","Type":"ContainerDied","Data":"fbaedcc5e387cc2218c24e854911786f6e43c689cf2a262171059cbe8c55a643"} Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.523409 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4b1fdcf9-4e2d-4681-869f-472f61c8da40","Type":"ContainerDied","Data":"0e5590d07911a213cda8306fdfb7c09e4039d458d4a85f0349cc6f592e029f9d"} Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.523419 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"4b1fdcf9-4e2d-4681-869f-472f61c8da40","Type":"ContainerDied","Data":"1bac47a7039a2df7714dd895ce344c007f07451c1b0975278fa3aa6eba595233"} Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.523436 4840 scope.go:117] "RemoveContainer" containerID="fbaedcc5e387cc2218c24e854911786f6e43c689cf2a262171059cbe8c55a643" Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.523622 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.542238 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b1fdcf9-4e2d-4681-869f-472f61c8da40-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4b1fdcf9-4e2d-4681-869f-472f61c8da40" (UID: "4b1fdcf9-4e2d-4681-869f-472f61c8da40"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.547061 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b1fdcf9-4e2d-4681-869f-472f61c8da40-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4b1fdcf9-4e2d-4681-869f-472f61c8da40" (UID: "4b1fdcf9-4e2d-4681-869f-472f61c8da40"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.547836 4840 scope.go:117] "RemoveContainer" containerID="0e5590d07911a213cda8306fdfb7c09e4039d458d4a85f0349cc6f592e029f9d" Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.573110 4840 scope.go:117] "RemoveContainer" containerID="fbaedcc5e387cc2218c24e854911786f6e43c689cf2a262171059cbe8c55a643" Dec 05 15:21:11 crc kubenswrapper[4840]: E1205 15:21:11.573585 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fbaedcc5e387cc2218c24e854911786f6e43c689cf2a262171059cbe8c55a643\": container with ID starting with fbaedcc5e387cc2218c24e854911786f6e43c689cf2a262171059cbe8c55a643 not found: ID does not exist" containerID="fbaedcc5e387cc2218c24e854911786f6e43c689cf2a262171059cbe8c55a643" Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.573643 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbaedcc5e387cc2218c24e854911786f6e43c689cf2a262171059cbe8c55a643"} err="failed to get container status \"fbaedcc5e387cc2218c24e854911786f6e43c689cf2a262171059cbe8c55a643\": rpc error: code = NotFound desc = could not find container \"fbaedcc5e387cc2218c24e854911786f6e43c689cf2a262171059cbe8c55a643\": container with ID starting with fbaedcc5e387cc2218c24e854911786f6e43c689cf2a262171059cbe8c55a643 not found: ID does not exist" Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.573681 4840 scope.go:117] "RemoveContainer" containerID="0e5590d07911a213cda8306fdfb7c09e4039d458d4a85f0349cc6f592e029f9d" Dec 05 15:21:11 crc kubenswrapper[4840]: E1205 15:21:11.574159 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e5590d07911a213cda8306fdfb7c09e4039d458d4a85f0349cc6f592e029f9d\": container with ID starting with 0e5590d07911a213cda8306fdfb7c09e4039d458d4a85f0349cc6f592e029f9d not found: ID does not exist" containerID="0e5590d07911a213cda8306fdfb7c09e4039d458d4a85f0349cc6f592e029f9d" Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.574194 4840 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e5590d07911a213cda8306fdfb7c09e4039d458d4a85f0349cc6f592e029f9d"} err="failed to get container status \"0e5590d07911a213cda8306fdfb7c09e4039d458d4a85f0349cc6f592e029f9d\": rpc error: code = NotFound desc = could not find container \"0e5590d07911a213cda8306fdfb7c09e4039d458d4a85f0349cc6f592e029f9d\": container with ID starting with 0e5590d07911a213cda8306fdfb7c09e4039d458d4a85f0349cc6f592e029f9d not found: ID does not exist" Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.574212 4840 scope.go:117] "RemoveContainer" containerID="fbaedcc5e387cc2218c24e854911786f6e43c689cf2a262171059cbe8c55a643" Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.574469 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbaedcc5e387cc2218c24e854911786f6e43c689cf2a262171059cbe8c55a643"} err="failed to get container status \"fbaedcc5e387cc2218c24e854911786f6e43c689cf2a262171059cbe8c55a643\": rpc error: code = NotFound desc = could not find container \"fbaedcc5e387cc2218c24e854911786f6e43c689cf2a262171059cbe8c55a643\": container with ID starting with fbaedcc5e387cc2218c24e854911786f6e43c689cf2a262171059cbe8c55a643 not found: ID does not exist" Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.574492 4840 scope.go:117] "RemoveContainer" containerID="0e5590d07911a213cda8306fdfb7c09e4039d458d4a85f0349cc6f592e029f9d" Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.574707 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e5590d07911a213cda8306fdfb7c09e4039d458d4a85f0349cc6f592e029f9d"} err="failed to get container status \"0e5590d07911a213cda8306fdfb7c09e4039d458d4a85f0349cc6f592e029f9d\": rpc error: code = NotFound desc = could not find container \"0e5590d07911a213cda8306fdfb7c09e4039d458d4a85f0349cc6f592e029f9d\": container with ID starting with 0e5590d07911a213cda8306fdfb7c09e4039d458d4a85f0349cc6f592e029f9d not found: ID does not exist" Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.580067 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b1fdcf9-4e2d-4681-869f-472f61c8da40-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.580104 4840 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b1fdcf9-4e2d-4681-869f-472f61c8da40-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.580112 4840 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b1fdcf9-4e2d-4681-869f-472f61c8da40-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.580123 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdl9d\" (UniqueName: \"kubernetes.io/projected/4b1fdcf9-4e2d-4681-869f-472f61c8da40-kube-api-access-mdl9d\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.580135 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b1fdcf9-4e2d-4681-869f-472f61c8da40-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.939193 4840 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.939193 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.960524 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.969639 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.985240 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Dec 05 15:21:11 crc kubenswrapper[4840]: E1205 15:21:11.985712 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b1fdcf9-4e2d-4681-869f-472f61c8da40" containerName="nova-api-api"
Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.985732 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b1fdcf9-4e2d-4681-869f-472f61c8da40" containerName="nova-api-api"
Dec 05 15:21:11 crc kubenswrapper[4840]: E1205 15:21:11.985744 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91d8d92c-10f0-453f-bb63-01bfaa5258c0" containerName="dnsmasq-dns"
Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.985752 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="91d8d92c-10f0-453f-bb63-01bfaa5258c0" containerName="dnsmasq-dns"
Dec 05 15:21:11 crc kubenswrapper[4840]: E1205 15:21:11.985786 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b1fdcf9-4e2d-4681-869f-472f61c8da40" containerName="nova-api-log"
Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.985793 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b1fdcf9-4e2d-4681-869f-472f61c8da40" containerName="nova-api-log"
Dec 05 15:21:11 crc kubenswrapper[4840]: E1205 15:21:11.985811 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe6183d0-57eb-47e8-8fe3-15cacaedfde3" containerName="nova-manage"
Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.985819 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe6183d0-57eb-47e8-8fe3-15cacaedfde3" containerName="nova-manage"
Dec 05 15:21:11 crc kubenswrapper[4840]: E1205 15:21:11.985837 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d7097e8-da3f-444a-9e06-20960a21b38b" containerName="nova-scheduler-scheduler"
Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.985845 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d7097e8-da3f-444a-9e06-20960a21b38b" containerName="nova-scheduler-scheduler"
Dec 05 15:21:11 crc kubenswrapper[4840]: E1205 15:21:11.985893 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91d8d92c-10f0-453f-bb63-01bfaa5258c0" containerName="init"
Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.985905 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="91d8d92c-10f0-453f-bb63-01bfaa5258c0" containerName="init"
Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.986117 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe6183d0-57eb-47e8-8fe3-15cacaedfde3" containerName="nova-manage"
Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.986130 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b1fdcf9-4e2d-4681-869f-472f61c8da40" containerName="nova-api-api"
Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.986147 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b1fdcf9-4e2d-4681-869f-472f61c8da40" containerName="nova-api-log"
Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.986168 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="91d8d92c-10f0-453f-bb63-01bfaa5258c0" containerName="dnsmasq-dns"
Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.986181 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d7097e8-da3f-444a-9e06-20960a21b38b" containerName="nova-scheduler-scheduler"
Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.987392 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.991175 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.991251 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Dec 05 15:21:11 crc kubenswrapper[4840]: I1205 15:21:11.991485 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.021925 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.078413 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b1fdcf9-4e2d-4681-869f-472f61c8da40" path="/var/lib/kubelet/pods/4b1fdcf9-4e2d-4681-869f-472f61c8da40/volumes"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.088062 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htt4n\" (UniqueName: \"kubernetes.io/projected/1d7097e8-da3f-444a-9e06-20960a21b38b-kube-api-access-htt4n\") pod \"1d7097e8-da3f-444a-9e06-20960a21b38b\" (UID: \"1d7097e8-da3f-444a-9e06-20960a21b38b\") "
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.088161 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d7097e8-da3f-444a-9e06-20960a21b38b-combined-ca-bundle\") pod \"1d7097e8-da3f-444a-9e06-20960a21b38b\" (UID: \"1d7097e8-da3f-444a-9e06-20960a21b38b\") "
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.088387 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d7097e8-da3f-444a-9e06-20960a21b38b-config-data\") pod \"1d7097e8-da3f-444a-9e06-20960a21b38b\" (UID: \"1d7097e8-da3f-444a-9e06-20960a21b38b\") "
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.088713 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d40873e-c669-464c-8c3d-bf5d60c99e62-public-tls-certs\") pod \"nova-api-0\" (UID: \"4d40873e-c669-464c-8c3d-bf5d60c99e62\") " pod="openstack/nova-api-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.088804 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d40873e-c669-464c-8c3d-bf5d60c99e62-logs\") pod \"nova-api-0\" (UID: \"4d40873e-c669-464c-8c3d-bf5d60c99e62\") " pod="openstack/nova-api-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.088827 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d40873e-c669-464c-8c3d-bf5d60c99e62-config-data\") pod \"nova-api-0\" (UID: \"4d40873e-c669-464c-8c3d-bf5d60c99e62\") " pod="openstack/nova-api-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.088854 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d40873e-c669-464c-8c3d-bf5d60c99e62-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4d40873e-c669-464c-8c3d-bf5d60c99e62\") " pod="openstack/nova-api-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.088933 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d40873e-c669-464c-8c3d-bf5d60c99e62-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4d40873e-c669-464c-8c3d-bf5d60c99e62\") " pod="openstack/nova-api-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.088955 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjv2n\" (UniqueName: \"kubernetes.io/projected/4d40873e-c669-464c-8c3d-bf5d60c99e62-kube-api-access-jjv2n\") pod \"nova-api-0\" (UID: \"4d40873e-c669-464c-8c3d-bf5d60c99e62\") " pod="openstack/nova-api-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.093140 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d7097e8-da3f-444a-9e06-20960a21b38b-kube-api-access-htt4n" (OuterVolumeSpecName: "kube-api-access-htt4n") pod "1d7097e8-da3f-444a-9e06-20960a21b38b" (UID: "1d7097e8-da3f-444a-9e06-20960a21b38b"). InnerVolumeSpecName "kube-api-access-htt4n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.115303 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d7097e8-da3f-444a-9e06-20960a21b38b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1d7097e8-da3f-444a-9e06-20960a21b38b" (UID: "1d7097e8-da3f-444a-9e06-20960a21b38b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.116512 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d7097e8-da3f-444a-9e06-20960a21b38b-config-data" (OuterVolumeSpecName: "config-data") pod "1d7097e8-da3f-444a-9e06-20960a21b38b" (UID: "1d7097e8-da3f-444a-9e06-20960a21b38b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.190533 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d40873e-c669-464c-8c3d-bf5d60c99e62-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4d40873e-c669-464c-8c3d-bf5d60c99e62\") " pod="openstack/nova-api-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.190586 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjv2n\" (UniqueName: \"kubernetes.io/projected/4d40873e-c669-464c-8c3d-bf5d60c99e62-kube-api-access-jjv2n\") pod \"nova-api-0\" (UID: \"4d40873e-c669-464c-8c3d-bf5d60c99e62\") " pod="openstack/nova-api-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.190637 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d40873e-c669-464c-8c3d-bf5d60c99e62-public-tls-certs\") pod \"nova-api-0\" (UID: \"4d40873e-c669-464c-8c3d-bf5d60c99e62\") " pod="openstack/nova-api-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.190821 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d40873e-c669-464c-8c3d-bf5d60c99e62-logs\") pod \"nova-api-0\" (UID: \"4d40873e-c669-464c-8c3d-bf5d60c99e62\") " pod="openstack/nova-api-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.190885 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d40873e-c669-464c-8c3d-bf5d60c99e62-config-data\") pod \"nova-api-0\" (UID: \"4d40873e-c669-464c-8c3d-bf5d60c99e62\") " pod="openstack/nova-api-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.190926 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d40873e-c669-464c-8c3d-bf5d60c99e62-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4d40873e-c669-464c-8c3d-bf5d60c99e62\") " pod="openstack/nova-api-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.191053 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d7097e8-da3f-444a-9e06-20960a21b38b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.191073 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d7097e8-da3f-444a-9e06-20960a21b38b-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.191087 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htt4n\" (UniqueName: \"kubernetes.io/projected/1d7097e8-da3f-444a-9e06-20960a21b38b-kube-api-access-htt4n\") on node \"crc\" DevicePath \"\""
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.191607 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4d40873e-c669-464c-8c3d-bf5d60c99e62-logs\") pod \"nova-api-0\" (UID: \"4d40873e-c669-464c-8c3d-bf5d60c99e62\") " pod="openstack/nova-api-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.194948 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d40873e-c669-464c-8c3d-bf5d60c99e62-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4d40873e-c669-464c-8c3d-bf5d60c99e62\") " pod="openstack/nova-api-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.195416 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d40873e-c669-464c-8c3d-bf5d60c99e62-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4d40873e-c669-464c-8c3d-bf5d60c99e62\") " pod="openstack/nova-api-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.195582 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d40873e-c669-464c-8c3d-bf5d60c99e62-public-tls-certs\") pod \"nova-api-0\" (UID: \"4d40873e-c669-464c-8c3d-bf5d60c99e62\") " pod="openstack/nova-api-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.196197 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d40873e-c669-464c-8c3d-bf5d60c99e62-config-data\") pod \"nova-api-0\" (UID: \"4d40873e-c669-464c-8c3d-bf5d60c99e62\") " pod="openstack/nova-api-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.208427 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjv2n\" (UniqueName: \"kubernetes.io/projected/4d40873e-c669-464c-8c3d-bf5d60c99e62-kube-api-access-jjv2n\") pod \"nova-api-0\" (UID: \"4d40873e-c669-464c-8c3d-bf5d60c99e62\") " pod="openstack/nova-api-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.306521 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.538678 4840 generic.go:334] "Generic (PLEG): container finished" podID="1d7097e8-da3f-444a-9e06-20960a21b38b" containerID="23be1e3b3ccd92013a0220e808bb85dc192b3ff3b1124f716015b1dd1115d87f" exitCode=0
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.539126 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.540447 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1d7097e8-da3f-444a-9e06-20960a21b38b","Type":"ContainerDied","Data":"23be1e3b3ccd92013a0220e808bb85dc192b3ff3b1124f716015b1dd1115d87f"}
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.540486 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1d7097e8-da3f-444a-9e06-20960a21b38b","Type":"ContainerDied","Data":"1b643f3d814045692a79b0011029bba70e606dbc1a120c16eeceeac8760d40cc"}
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.540502 4840 scope.go:117] "RemoveContainer" containerID="23be1e3b3ccd92013a0220e808bb85dc192b3ff3b1124f716015b1dd1115d87f"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.584784 4840 scope.go:117] "RemoveContainer" containerID="23be1e3b3ccd92013a0220e808bb85dc192b3ff3b1124f716015b1dd1115d87f"
Dec 05 15:21:12 crc kubenswrapper[4840]: E1205 15:21:12.585273 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23be1e3b3ccd92013a0220e808bb85dc192b3ff3b1124f716015b1dd1115d87f\": container with ID starting with 23be1e3b3ccd92013a0220e808bb85dc192b3ff3b1124f716015b1dd1115d87f not found: ID does not exist" containerID="23be1e3b3ccd92013a0220e808bb85dc192b3ff3b1124f716015b1dd1115d87f"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.585321 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23be1e3b3ccd92013a0220e808bb85dc192b3ff3b1124f716015b1dd1115d87f"} err="failed to get container status \"23be1e3b3ccd92013a0220e808bb85dc192b3ff3b1124f716015b1dd1115d87f\": rpc error: code = NotFound desc = could not find container \"23be1e3b3ccd92013a0220e808bb85dc192b3ff3b1124f716015b1dd1115d87f\": container with ID starting with 23be1e3b3ccd92013a0220e808bb85dc192b3ff3b1124f716015b1dd1115d87f not found: ID does not exist"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.603118 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.612018 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.623066 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.624356 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.627643 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.635067 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.656506 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfx2k\" (UniqueName: \"kubernetes.io/projected/863cd7d3-d2a4-44eb-88c8-c3cd9259cb78-kube-api-access-xfx2k\") pod \"nova-scheduler-0\" (UID: \"863cd7d3-d2a4-44eb-88c8-c3cd9259cb78\") " pod="openstack/nova-scheduler-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.656581 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/863cd7d3-d2a4-44eb-88c8-c3cd9259cb78-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"863cd7d3-d2a4-44eb-88c8-c3cd9259cb78\") " pod="openstack/nova-scheduler-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.656668 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/863cd7d3-d2a4-44eb-88c8-c3cd9259cb78-config-data\") pod \"nova-scheduler-0\" (UID: \"863cd7d3-d2a4-44eb-88c8-c3cd9259cb78\") " pod="openstack/nova-scheduler-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.758571 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/863cd7d3-d2a4-44eb-88c8-c3cd9259cb78-config-data\") pod \"nova-scheduler-0\" (UID: \"863cd7d3-d2a4-44eb-88c8-c3cd9259cb78\") " pod="openstack/nova-scheduler-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.758689 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfx2k\" (UniqueName: \"kubernetes.io/projected/863cd7d3-d2a4-44eb-88c8-c3cd9259cb78-kube-api-access-xfx2k\") pod \"nova-scheduler-0\" (UID: \"863cd7d3-d2a4-44eb-88c8-c3cd9259cb78\") " pod="openstack/nova-scheduler-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.758769 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/863cd7d3-d2a4-44eb-88c8-c3cd9259cb78-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"863cd7d3-d2a4-44eb-88c8-c3cd9259cb78\") " pod="openstack/nova-scheduler-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.763961 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/863cd7d3-d2a4-44eb-88c8-c3cd9259cb78-config-data\") pod \"nova-scheduler-0\" (UID: \"863cd7d3-d2a4-44eb-88c8-c3cd9259cb78\") " pod="openstack/nova-scheduler-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.764041 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/863cd7d3-d2a4-44eb-88c8-c3cd9259cb78-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"863cd7d3-d2a4-44eb-88c8-c3cd9259cb78\") " pod="openstack/nova-scheduler-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.777268 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfx2k\" (UniqueName: \"kubernetes.io/projected/863cd7d3-d2a4-44eb-88c8-c3cd9259cb78-kube-api-access-xfx2k\") pod \"nova-scheduler-0\" (UID: \"863cd7d3-d2a4-44eb-88c8-c3cd9259cb78\") " pod="openstack/nova-scheduler-0"
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.810439 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 05 15:21:12 crc kubenswrapper[4840]: W1205 15:21:12.811062 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4d40873e_c669_464c_8c3d_bf5d60c99e62.slice/crio-a8ea274b4c552bf7481b6634b0dca85696d7e90e20bca0e2c35a70b639bcd047 WatchSource:0}: Error finding container a8ea274b4c552bf7481b6634b0dca85696d7e90e20bca0e2c35a70b639bcd047: Status 404 returned error can't find the container with id a8ea274b4c552bf7481b6634b0dca85696d7e90e20bca0e2c35a70b639bcd047
Dec 05 15:21:12 crc kubenswrapper[4840]: I1205 15:21:12.950245 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 05 15:21:13 crc kubenswrapper[4840]: I1205 15:21:13.366581 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 15:21:13 crc kubenswrapper[4840]: W1205 15:21:13.370479 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod863cd7d3_d2a4_44eb_88c8_c3cd9259cb78.slice/crio-11e9d68b7f6f326eea118a13c1c681e7b7a6ea1821d52b0d63caaeb1af8776d7 WatchSource:0}: Error finding container 11e9d68b7f6f326eea118a13c1c681e7b7a6ea1821d52b0d63caaeb1af8776d7: Status 404 returned error can't find the container with id 11e9d68b7f6f326eea118a13c1c681e7b7a6ea1821d52b0d63caaeb1af8776d7
Dec 05 15:21:13 crc kubenswrapper[4840]: I1205 15:21:13.569504 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"863cd7d3-d2a4-44eb-88c8-c3cd9259cb78","Type":"ContainerStarted","Data":"adf31179e40a9f17158a4b27db1ce186f8eb359cfd0cccf6c66700c662ebd7a6"}
Dec 05 15:21:13 crc kubenswrapper[4840]: I1205 15:21:13.569599 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"863cd7d3-d2a4-44eb-88c8-c3cd9259cb78","Type":"ContainerStarted","Data":"11e9d68b7f6f326eea118a13c1c681e7b7a6ea1821d52b0d63caaeb1af8776d7"}
Dec 05 15:21:13 crc kubenswrapper[4840]: I1205 15:21:13.573254 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4d40873e-c669-464c-8c3d-bf5d60c99e62","Type":"ContainerStarted","Data":"f01d1c183ce2a5255a85bb24b9e78fdbbbc3e44527da40512713c1f708e33424"}
Dec 05 15:21:13 crc kubenswrapper[4840]: I1205 15:21:13.573301 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4d40873e-c669-464c-8c3d-bf5d60c99e62","Type":"ContainerStarted","Data":"b385a388a8bb852409f3bef80bd746438a6c51497c4f65a6e46a37f5813eeed3"}
Dec 05 15:21:13 crc kubenswrapper[4840]: I1205 15:21:13.573316 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4d40873e-c669-464c-8c3d-bf5d60c99e62","Type":"ContainerStarted","Data":"a8ea274b4c552bf7481b6634b0dca85696d7e90e20bca0e2c35a70b639bcd047"}
Dec 05 15:21:13 crc kubenswrapper[4840]: I1205 15:21:13.585926 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.585909083 podStartE2EDuration="1.585909083s" podCreationTimestamp="2025-12-05 15:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:21:13.585688557 +0000 UTC m=+1351.926751171" watchObservedRunningTime="2025-12-05 15:21:13.585909083 +0000 UTC m=+1351.926971697"
Dec 05 15:21:13 crc kubenswrapper[4840]: I1205 15:21:13.601158 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.601140302 podStartE2EDuration="2.601140302s" podCreationTimestamp="2025-12-05 15:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:21:13.600333439 +0000 UTC m=+1351.941396063" watchObservedRunningTime="2025-12-05 15:21:13.601140302 +0000 UTC m=+1351.942202916"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.078550 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d7097e8-da3f-444a-9e06-20960a21b38b" path="/var/lib/kubelet/pods/1d7097e8-da3f-444a-9e06-20960a21b38b/volumes"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.349728 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.395731 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-logs\") pod \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\" (UID: \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\") "
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.395823 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-config-data\") pod \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\" (UID: \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\") "
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.395855 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-nova-metadata-tls-certs\") pod \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\" (UID: \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\") "
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.395988 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-combined-ca-bundle\") pod \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\" (UID: \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\") "
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.396056 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hntvp\" (UniqueName: \"kubernetes.io/projected/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-kube-api-access-hntvp\") pod \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\" (UID: \"1c301a0e-a0bd-4ab4-b1e1-8d105434d534\") "
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.396380 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-logs" (OuterVolumeSpecName: "logs") pod "1c301a0e-a0bd-4ab4-b1e1-8d105434d534" (UID: "1c301a0e-a0bd-4ab4-b1e1-8d105434d534"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.402503 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-kube-api-access-hntvp" (OuterVolumeSpecName: "kube-api-access-hntvp") pod "1c301a0e-a0bd-4ab4-b1e1-8d105434d534" (UID: "1c301a0e-a0bd-4ab4-b1e1-8d105434d534"). InnerVolumeSpecName "kube-api-access-hntvp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.427558 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-config-data" (OuterVolumeSpecName: "config-data") pod "1c301a0e-a0bd-4ab4-b1e1-8d105434d534" (UID: "1c301a0e-a0bd-4ab4-b1e1-8d105434d534"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.435231 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1c301a0e-a0bd-4ab4-b1e1-8d105434d534" (UID: "1c301a0e-a0bd-4ab4-b1e1-8d105434d534"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.455763 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "1c301a0e-a0bd-4ab4-b1e1-8d105434d534" (UID: "1c301a0e-a0bd-4ab4-b1e1-8d105434d534"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.498678 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.498711 4840 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.498764 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.498773 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hntvp\" (UniqueName: \"kubernetes.io/projected/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-kube-api-access-hntvp\") on node \"crc\" DevicePath \"\""
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.498801 4840 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c301a0e-a0bd-4ab4-b1e1-8d105434d534-logs\") on node \"crc\" DevicePath \"\""
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.603076 4840 generic.go:334] "Generic (PLEG): container finished" podID="1c301a0e-a0bd-4ab4-b1e1-8d105434d534" containerID="ab75ea0e9cf26282e92727ba6687b5e2fc8c2193c4c417791b70f3d73fe49a38" exitCode=0
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.603684 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.607008 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1c301a0e-a0bd-4ab4-b1e1-8d105434d534","Type":"ContainerDied","Data":"ab75ea0e9cf26282e92727ba6687b5e2fc8c2193c4c417791b70f3d73fe49a38"}
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.607192 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1c301a0e-a0bd-4ab4-b1e1-8d105434d534","Type":"ContainerDied","Data":"6eba4c592bbf5d70b108060ad55cf4fd050ea663254c1c26cb7fd8028aa7596d"}
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.607227 4840 scope.go:117] "RemoveContainer" containerID="ab75ea0e9cf26282e92727ba6687b5e2fc8c2193c4c417791b70f3d73fe49a38"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.641278 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.645419 4840 scope.go:117] "RemoveContainer" containerID="c1c139fe6eda54ff1d01fd0ccf4c5c90e95428e38b6cf88549573326b04f079c"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.665550 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.676927 4840 scope.go:117] "RemoveContainer" containerID="ab75ea0e9cf26282e92727ba6687b5e2fc8c2193c4c417791b70f3d73fe49a38"
Dec 05 15:21:14 crc kubenswrapper[4840]: E1205 15:21:14.677673 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab75ea0e9cf26282e92727ba6687b5e2fc8c2193c4c417791b70f3d73fe49a38\": container with ID starting with ab75ea0e9cf26282e92727ba6687b5e2fc8c2193c4c417791b70f3d73fe49a38 not found: ID does not exist" containerID="ab75ea0e9cf26282e92727ba6687b5e2fc8c2193c4c417791b70f3d73fe49a38"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.677707 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab75ea0e9cf26282e92727ba6687b5e2fc8c2193c4c417791b70f3d73fe49a38"} err="failed to get container status \"ab75ea0e9cf26282e92727ba6687b5e2fc8c2193c4c417791b70f3d73fe49a38\": rpc error: code = NotFound desc = could not find container \"ab75ea0e9cf26282e92727ba6687b5e2fc8c2193c4c417791b70f3d73fe49a38\": container with ID starting with ab75ea0e9cf26282e92727ba6687b5e2fc8c2193c4c417791b70f3d73fe49a38 not found: ID does not exist"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.677733 4840 scope.go:117] "RemoveContainer" containerID="c1c139fe6eda54ff1d01fd0ccf4c5c90e95428e38b6cf88549573326b04f079c"
Dec 05 15:21:14 crc kubenswrapper[4840]: E1205 15:21:14.678310 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1c139fe6eda54ff1d01fd0ccf4c5c90e95428e38b6cf88549573326b04f079c\": container with ID starting with c1c139fe6eda54ff1d01fd0ccf4c5c90e95428e38b6cf88549573326b04f079c not found: ID does not exist" containerID="c1c139fe6eda54ff1d01fd0ccf4c5c90e95428e38b6cf88549573326b04f079c"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.678351 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1c139fe6eda54ff1d01fd0ccf4c5c90e95428e38b6cf88549573326b04f079c"} err="failed to get container status \"c1c139fe6eda54ff1d01fd0ccf4c5c90e95428e38b6cf88549573326b04f079c\": rpc error: code = NotFound desc = could not find container \"c1c139fe6eda54ff1d01fd0ccf4c5c90e95428e38b6cf88549573326b04f079c\": container with ID starting with c1c139fe6eda54ff1d01fd0ccf4c5c90e95428e38b6cf88549573326b04f079c not found: ID does not exist"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.684248 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 15:21:14 crc kubenswrapper[4840]: E1205 15:21:14.684943 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c301a0e-a0bd-4ab4-b1e1-8d105434d534" containerName="nova-metadata-log"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.685084 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c301a0e-a0bd-4ab4-b1e1-8d105434d534" containerName="nova-metadata-log"
Dec 05 15:21:14 crc kubenswrapper[4840]: E1205 15:21:14.685181 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c301a0e-a0bd-4ab4-b1e1-8d105434d534" containerName="nova-metadata-metadata"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.685261 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c301a0e-a0bd-4ab4-b1e1-8d105434d534" containerName="nova-metadata-metadata"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.685659 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c301a0e-a0bd-4ab4-b1e1-8d105434d534" containerName="nova-metadata-metadata"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.685765 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c301a0e-a0bd-4ab4-b1e1-8d105434d534" containerName="nova-metadata-log"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.687105 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.689298 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.691113 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.702717 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0853ab99-c6ae-4ea4-8aa3-119437720120-logs\") pod \"nova-metadata-0\" (UID: \"0853ab99-c6ae-4ea4-8aa3-119437720120\") " pod="openstack/nova-metadata-0"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.704305 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0853ab99-c6ae-4ea4-8aa3-119437720120-config-data\") pod \"nova-metadata-0\" (UID: \"0853ab99-c6ae-4ea4-8aa3-119437720120\") " pod="openstack/nova-metadata-0"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.704395 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7wrm\" (UniqueName: \"kubernetes.io/projected/0853ab99-c6ae-4ea4-8aa3-119437720120-kube-api-access-c7wrm\") pod \"nova-metadata-0\" (UID: \"0853ab99-c6ae-4ea4-8aa3-119437720120\") " pod="openstack/nova-metadata-0"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.704589 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0853ab99-c6ae-4ea4-8aa3-119437720120-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"0853ab99-c6ae-4ea4-8aa3-119437720120\") " pod="openstack/nova-metadata-0"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.708752 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.710219 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0853ab99-c6ae-4ea4-8aa3-119437720120-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"0853ab99-c6ae-4ea4-8aa3-119437720120\") " pod="openstack/nova-metadata-0"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.812007 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0853ab99-c6ae-4ea4-8aa3-119437720120-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"0853ab99-c6ae-4ea4-8aa3-119437720120\") " pod="openstack/nova-metadata-0"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.812199 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0853ab99-c6ae-4ea4-8aa3-119437720120-logs\") pod \"nova-metadata-0\" (UID: \"0853ab99-c6ae-4ea4-8aa3-119437720120\") " pod="openstack/nova-metadata-0"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.812222 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0853ab99-c6ae-4ea4-8aa3-119437720120-config-data\") pod \"nova-metadata-0\" (UID: \"0853ab99-c6ae-4ea4-8aa3-119437720120\") " pod="openstack/nova-metadata-0"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.812259 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7wrm\" (UniqueName: \"kubernetes.io/projected/0853ab99-c6ae-4ea4-8aa3-119437720120-kube-api-access-c7wrm\") pod \"nova-metadata-0\" (UID: \"0853ab99-c6ae-4ea4-8aa3-119437720120\") " pod="openstack/nova-metadata-0"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.812644 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0853ab99-c6ae-4ea4-8aa3-119437720120-logs\") pod \"nova-metadata-0\" (UID: \"0853ab99-c6ae-4ea4-8aa3-119437720120\") " pod="openstack/nova-metadata-0"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.812748 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0853ab99-c6ae-4ea4-8aa3-119437720120-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"0853ab99-c6ae-4ea4-8aa3-119437720120\") " pod="openstack/nova-metadata-0"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.815656 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0853ab99-c6ae-4ea4-8aa3-119437720120-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"0853ab99-c6ae-4ea4-8aa3-119437720120\") " pod="openstack/nova-metadata-0"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.815672 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0853ab99-c6ae-4ea4-8aa3-119437720120-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"0853ab99-c6ae-4ea4-8aa3-119437720120\") " pod="openstack/nova-metadata-0"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.816092 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0853ab99-c6ae-4ea4-8aa3-119437720120-config-data\") pod \"nova-metadata-0\" (UID: \"0853ab99-c6ae-4ea4-8aa3-119437720120\") " pod="openstack/nova-metadata-0"
Dec 05 15:21:14 crc kubenswrapper[4840]: I1205 15:21:14.829445 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7wrm\" (UniqueName: \"kubernetes.io/projected/0853ab99-c6ae-4ea4-8aa3-119437720120-kube-api-access-c7wrm\") pod \"nova-metadata-0\" (UID: \"0853ab99-c6ae-4ea4-8aa3-119437720120\") " pod="openstack/nova-metadata-0"
Dec 05 15:21:15 crc kubenswrapper[4840]: I1205 15:21:15.004620 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 15:21:15 crc kubenswrapper[4840]: I1205 15:21:15.483055 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 15:21:15 crc kubenswrapper[4840]: I1205 15:21:15.584919 4840 scope.go:117] "RemoveContainer" containerID="ca23fa2f0ae720a8b0e15f4a88a88ad0c9085bda59ac8ea72db9a9e7453687a2"
Dec 05 15:21:15 crc kubenswrapper[4840]: I1205 15:21:15.613217 4840 scope.go:117] "RemoveContainer" containerID="246e86458d6cfb28ba869ab3cc2876e0e302f9d05b76dd44d709e03fc11bb785"
Dec 05 15:21:15 crc kubenswrapper[4840]: I1205 15:21:15.615954 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0853ab99-c6ae-4ea4-8aa3-119437720120","Type":"ContainerStarted","Data":"7812c00dfbc787368eae7d5537fb07d500e44c40df6b1ed9c8795a9eb3786c6d"}
Dec 05 15:21:15 crc kubenswrapper[4840]: I1205 15:21:15.642635 4840 scope.go:117] "RemoveContainer" containerID="4fddf69981eb33f5caa2aab31fea8cdacb07098615863b13c1c218b630c3afa8"
Dec 05 15:21:16 crc kubenswrapper[4840]: I1205 15:21:16.077333 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c301a0e-a0bd-4ab4-b1e1-8d105434d534" path="/var/lib/kubelet/pods/1c301a0e-a0bd-4ab4-b1e1-8d105434d534/volumes"
Dec 05 15:21:16 crc kubenswrapper[4840]: I1205 15:21:16.626558 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0853ab99-c6ae-4ea4-8aa3-119437720120","Type":"ContainerStarted","Data":"0912ae4f24709f9d2d458ff14c6cc42e7d2f08ed5d4c3cb1bdeafbdf37f472bc"}
Dec 05 15:21:16 crc kubenswrapper[4840]: I1205 15:21:16.626601 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0853ab99-c6ae-4ea4-8aa3-119437720120","Type":"ContainerStarted","Data":"9b1268d99637efd5662e4d732ff45ed0d2a4fd3b24c35b0ebffa746c83fcbd7f"}
Dec 05 15:21:16 crc kubenswrapper[4840]: I1205 15:21:16.653324 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.653299922 podStartE2EDuration="2.653299922s" podCreationTimestamp="2025-12-05 15:21:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:21:16.647813888 +0000 UTC m=+1354.988876512" watchObservedRunningTime="2025-12-05 15:21:16.653299922 +0000 UTC m=+1354.994362536"
Dec 05 15:21:17 crc kubenswrapper[4840]: I1205 15:21:17.950899 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Dec 05 15:21:19 crc kubenswrapper[4840]: I1205 15:21:19.471666 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 15:21:19 crc kubenswrapper[4840]: I1205 15:21:19.472099 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 15:21:20 crc kubenswrapper[4840]: I1205 15:21:20.005130 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
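The probe records on either side of this point come in two shapes: kubelet.go:2542 "SyncLoop (probe)" status transitions, and prober.go:107 "Probe failed" results carrying the HTTP error in output=. A stdlib-Python sketch that tallies failures per pod, container, and probe type; the field order in the pattern matches the records:

import re
import sys
from collections import Counter

# prober.go "Probe failed" records carry probeType, pod, podUID and
# containerName in that order; this counts failures per tuple.
FAIL = re.compile(r'"Probe failed" probeType="([^"]+)" pod="([^"]+)" podUID="[^"]+" containerName="([^"]+)"')

failures = Counter()
for line in sys.stdin:
    match = FAIL.search(line)
    if match:
        failures[match.groups()] += 1

for (probe_type, pod, container), count in failures.most_common():
    print(f"{count:3d}  {probe_type:8s}  {pod} / {container}")

On the records that follow, the startup-probe failures are transient: each pod reports status="started" and then readiness status="ready" within roughly ten seconds.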
Dec 05 15:21:20 crc kubenswrapper[4840]: I1205 15:21:20.005248 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 05 15:21:22 crc kubenswrapper[4840]: I1205 15:21:22.307202 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 05 15:21:22 crc kubenswrapper[4840]: I1205 15:21:22.307646 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 05 15:21:22 crc kubenswrapper[4840]: I1205 15:21:22.951360 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Dec 05 15:21:22 crc kubenswrapper[4840]: I1205 15:21:22.977103 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Dec 05 15:21:23 crc kubenswrapper[4840]: I1205 15:21:23.320085 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4d40873e-c669-464c-8c3d-bf5d60c99e62" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 05 15:21:23 crc kubenswrapper[4840]: I1205 15:21:23.320100 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4d40873e-c669-464c-8c3d-bf5d60c99e62" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 05 15:21:23 crc kubenswrapper[4840]: I1205 15:21:23.725326 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Dec 05 15:21:25 crc kubenswrapper[4840]: I1205 15:21:25.005295 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Dec 05 15:21:25 crc kubenswrapper[4840]: I1205 15:21:25.005673 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Dec 05 15:21:26 crc kubenswrapper[4840]: I1205 15:21:26.027102 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="0853ab99-c6ae-4ea4-8aa3-119437720120" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.206:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 05 15:21:26 crc kubenswrapper[4840]: I1205 15:21:26.027210 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="0853ab99-c6ae-4ea4-8aa3-119437720120" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.206:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 05 15:21:29 crc kubenswrapper[4840]: I1205 15:21:29.753244 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Dec 05 15:21:32 crc kubenswrapper[4840]: I1205 15:21:32.314544 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 05 15:21:32 crc kubenswrapper[4840]: I1205 15:21:32.316488 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 05 15:21:32 crc kubenswrapper[4840]: I1205 15:21:32.317044 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Dec 05 15:21:32 crc kubenswrapper[4840]: I1205 15:21:32.317164 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Dec 05 15:21:32 crc kubenswrapper[4840]: I1205 15:21:32.334224 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 05 15:21:32 crc kubenswrapper[4840]: I1205 15:21:32.335800 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 05 15:21:35 crc kubenswrapper[4840]: I1205 15:21:35.011312 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Dec 05 15:21:35 crc kubenswrapper[4840]: I1205 15:21:35.012102 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Dec 05 15:21:35 crc kubenswrapper[4840]: I1205 15:21:35.016719 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Dec 05 15:21:35 crc kubenswrapper[4840]: I1205 15:21:35.828488 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Dec 05 15:21:37 crc kubenswrapper[4840]: I1205 15:21:37.627858 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-l8vz9"]
Dec 05 15:21:37 crc kubenswrapper[4840]: I1205 15:21:37.629956 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l8vz9"
Dec 05 15:21:37 crc kubenswrapper[4840]: I1205 15:21:37.646649 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l8vz9"]
Dec 05 15:21:37 crc kubenswrapper[4840]: I1205 15:21:37.758360 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2w4f\" (UniqueName: \"kubernetes.io/projected/1c1a5e9e-4618-4e48-beb7-16857a715739-kube-api-access-r2w4f\") pod \"redhat-operators-l8vz9\" (UID: \"1c1a5e9e-4618-4e48-beb7-16857a715739\") " pod="openshift-marketplace/redhat-operators-l8vz9"
Dec 05 15:21:37 crc kubenswrapper[4840]: I1205 15:21:37.758440 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c1a5e9e-4618-4e48-beb7-16857a715739-catalog-content\") pod \"redhat-operators-l8vz9\" (UID: \"1c1a5e9e-4618-4e48-beb7-16857a715739\") " pod="openshift-marketplace/redhat-operators-l8vz9"
Dec 05 15:21:37 crc kubenswrapper[4840]: I1205 15:21:37.758504 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c1a5e9e-4618-4e48-beb7-16857a715739-utilities\") pod \"redhat-operators-l8vz9\" (UID: \"1c1a5e9e-4618-4e48-beb7-16857a715739\") " pod="openshift-marketplace/redhat-operators-l8vz9"
Dec 05 15:21:37 crc kubenswrapper[4840]: I1205 15:21:37.860770 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2w4f\" (UniqueName: \"kubernetes.io/projected/1c1a5e9e-4618-4e48-beb7-16857a715739-kube-api-access-r2w4f\") pod \"redhat-operators-l8vz9\" (UID: \"1c1a5e9e-4618-4e48-beb7-16857a715739\") " pod="openshift-marketplace/redhat-operators-l8vz9"
Dec 05 15:21:37 crc kubenswrapper[4840]: I1205 15:21:37.860826 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c1a5e9e-4618-4e48-beb7-16857a715739-catalog-content\") pod \"redhat-operators-l8vz9\" (UID: \"1c1a5e9e-4618-4e48-beb7-16857a715739\") " pod="openshift-marketplace/redhat-operators-l8vz9"
Dec 05 15:21:37 crc kubenswrapper[4840]: I1205 15:21:37.860858 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c1a5e9e-4618-4e48-beb7-16857a715739-utilities\") pod \"redhat-operators-l8vz9\" (UID: \"1c1a5e9e-4618-4e48-beb7-16857a715739\") " pod="openshift-marketplace/redhat-operators-l8vz9"
Dec 05 15:21:37 crc kubenswrapper[4840]: I1205 15:21:37.861445 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c1a5e9e-4618-4e48-beb7-16857a715739-catalog-content\") pod \"redhat-operators-l8vz9\" (UID: \"1c1a5e9e-4618-4e48-beb7-16857a715739\") " pod="openshift-marketplace/redhat-operators-l8vz9"
Dec 05 15:21:37 crc kubenswrapper[4840]: I1205 15:21:37.861468 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c1a5e9e-4618-4e48-beb7-16857a715739-utilities\") pod \"redhat-operators-l8vz9\" (UID: \"1c1a5e9e-4618-4e48-beb7-16857a715739\") " pod="openshift-marketplace/redhat-operators-l8vz9"
Dec 05 15:21:37 crc kubenswrapper[4840]: I1205 15:21:37.882362 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2w4f\" (UniqueName: \"kubernetes.io/projected/1c1a5e9e-4618-4e48-beb7-16857a715739-kube-api-access-r2w4f\") pod \"redhat-operators-l8vz9\" (UID: \"1c1a5e9e-4618-4e48-beb7-16857a715739\") " pod="openshift-marketplace/redhat-operators-l8vz9"
Dec 05 15:21:37 crc kubenswrapper[4840]: I1205 15:21:37.953836 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l8vz9"
Dec 05 15:21:38 crc kubenswrapper[4840]: I1205 15:21:38.437356 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l8vz9"]
Dec 05 15:21:38 crc kubenswrapper[4840]: W1205 15:21:38.443350 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1c1a5e9e_4618_4e48_beb7_16857a715739.slice/crio-b306785a235291488fcb5569ec3020945ab6e724c9f1804e0aa22d0b18cd4571 WatchSource:0}: Error finding container b306785a235291488fcb5569ec3020945ab6e724c9f1804e0aa22d0b18cd4571: Status 404 returned error can't find the container with id b306785a235291488fcb5569ec3020945ab6e724c9f1804e0aa22d0b18cd4571
Dec 05 15:21:38 crc kubenswrapper[4840]: I1205 15:21:38.855061 4840 generic.go:334] "Generic (PLEG): container finished" podID="1c1a5e9e-4618-4e48-beb7-16857a715739" containerID="102c0041971fdce94370260fe4b18299860c0846ead1835da675010868bc5fc5" exitCode=0
Dec 05 15:21:38 crc kubenswrapper[4840]: I1205 15:21:38.855118 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l8vz9" event={"ID":"1c1a5e9e-4618-4e48-beb7-16857a715739","Type":"ContainerDied","Data":"102c0041971fdce94370260fe4b18299860c0846ead1835da675010868bc5fc5"}
Dec 05 15:21:38 crc kubenswrapper[4840]: I1205 15:21:38.855352 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l8vz9" event={"ID":"1c1a5e9e-4618-4e48-beb7-16857a715739","Type":"ContainerStarted","Data":"b306785a235291488fcb5569ec3020945ab6e724c9f1804e0aa22d0b18cd4571"}
Dec 05 15:21:39 crc kubenswrapper[4840]: I1205 15:21:39.867462 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod"
pod="openshift-marketplace/redhat-operators-l8vz9" event={"ID":"1c1a5e9e-4618-4e48-beb7-16857a715739","Type":"ContainerStarted","Data":"39a17840eb0d87c338ee1a5cb3b2843936efcccf21bdfe67674b3e42936c9651"} Dec 05 15:21:41 crc kubenswrapper[4840]: I1205 15:21:41.887999 4840 generic.go:334] "Generic (PLEG): container finished" podID="1c1a5e9e-4618-4e48-beb7-16857a715739" containerID="39a17840eb0d87c338ee1a5cb3b2843936efcccf21bdfe67674b3e42936c9651" exitCode=0 Dec 05 15:21:41 crc kubenswrapper[4840]: I1205 15:21:41.888060 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l8vz9" event={"ID":"1c1a5e9e-4618-4e48-beb7-16857a715739","Type":"ContainerDied","Data":"39a17840eb0d87c338ee1a5cb3b2843936efcccf21bdfe67674b3e42936c9651"} Dec 05 15:21:42 crc kubenswrapper[4840]: I1205 15:21:42.900028 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l8vz9" event={"ID":"1c1a5e9e-4618-4e48-beb7-16857a715739","Type":"ContainerStarted","Data":"66dd4aea91f8b82dac5ee1226c0b06ccf0a81edf1d1831042c97b57c1a1555bf"} Dec 05 15:21:42 crc kubenswrapper[4840]: I1205 15:21:42.921559 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-l8vz9" podStartSLOduration=2.494749186 podStartE2EDuration="5.9215402s" podCreationTimestamp="2025-12-05 15:21:37 +0000 UTC" firstStartedPulling="2025-12-05 15:21:38.856420286 +0000 UTC m=+1377.197482900" lastFinishedPulling="2025-12-05 15:21:42.2832113 +0000 UTC m=+1380.624273914" observedRunningTime="2025-12-05 15:21:42.920068029 +0000 UTC m=+1381.261130643" watchObservedRunningTime="2025-12-05 15:21:42.9215402 +0000 UTC m=+1381.262602814" Dec 05 15:21:44 crc kubenswrapper[4840]: I1205 15:21:44.676159 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 15:21:45 crc kubenswrapper[4840]: I1205 15:21:45.951161 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 15:21:47 crc kubenswrapper[4840]: I1205 15:21:47.954570 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-l8vz9" Dec 05 15:21:47 crc kubenswrapper[4840]: I1205 15:21:47.954974 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-l8vz9" Dec 05 15:21:48 crc kubenswrapper[4840]: I1205 15:21:48.753406 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="e8fc49c1-0820-4dcb-9a50-9d3504b768d9" containerName="rabbitmq" containerID="cri-o://2c611d00f0b25d3869e1beb7eb13a1b7e50608dc6276ce0868249808e9d71ed1" gracePeriod=604796 Dec 05 15:21:49 crc kubenswrapper[4840]: I1205 15:21:49.008180 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-l8vz9" podUID="1c1a5e9e-4618-4e48-beb7-16857a715739" containerName="registry-server" probeResult="failure" output=< Dec 05 15:21:49 crc kubenswrapper[4840]: timeout: failed to connect service ":50051" within 1s Dec 05 15:21:49 crc kubenswrapper[4840]: > Dec 05 15:21:49 crc kubenswrapper[4840]: I1205 15:21:49.472427 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:21:49 crc 
kubenswrapper[4840]: I1205 15:21:49.472806 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:21:49 crc kubenswrapper[4840]: I1205 15:21:49.472919 4840 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" Dec 05 15:21:49 crc kubenswrapper[4840]: I1205 15:21:49.473976 4840 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"40055bc2420888638e5359189b9a9948a0cbd2aa70e5300e22d9c614bb6d6f19"} pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 15:21:49 crc kubenswrapper[4840]: I1205 15:21:49.474088 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" containerID="cri-o://40055bc2420888638e5359189b9a9948a0cbd2aa70e5300e22d9c614bb6d6f19" gracePeriod=600 Dec 05 15:21:49 crc kubenswrapper[4840]: I1205 15:21:49.962005 4840 generic.go:334] "Generic (PLEG): container finished" podID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerID="40055bc2420888638e5359189b9a9948a0cbd2aa70e5300e22d9c614bb6d6f19" exitCode=0 Dec 05 15:21:49 crc kubenswrapper[4840]: I1205 15:21:49.962044 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerDied","Data":"40055bc2420888638e5359189b9a9948a0cbd2aa70e5300e22d9c614bb6d6f19"} Dec 05 15:21:49 crc kubenswrapper[4840]: I1205 15:21:49.962715 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerStarted","Data":"9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b"} Dec 05 15:21:49 crc kubenswrapper[4840]: I1205 15:21:49.962818 4840 scope.go:117] "RemoveContainer" containerID="40a488d86bcc9fa72aa671ce4746fbea89ae7ae377bf2c9aff3cd9df6bf2d02c" Dec 05 15:21:50 crc kubenswrapper[4840]: I1205 15:21:50.451355 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="f169c577-448f-45db-bcdd-f34f5c24e6bb" containerName="rabbitmq" containerID="cri-o://d69c4ae1302b8f1f898d03d8e3007d19a59c63e4e1e7dbbcdf22386585f688a8" gracePeriod=604796 Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.066311 4840 generic.go:334] "Generic (PLEG): container finished" podID="e8fc49c1-0820-4dcb-9a50-9d3504b768d9" containerID="2c611d00f0b25d3869e1beb7eb13a1b7e50608dc6276ce0868249808e9d71ed1" exitCode=0 Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.066382 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e8fc49c1-0820-4dcb-9a50-9d3504b768d9","Type":"ContainerDied","Data":"2c611d00f0b25d3869e1beb7eb13a1b7e50608dc6276ce0868249808e9d71ed1"} Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.366173 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.547020 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-server-conf\") pod \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.547069 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-rabbitmq-erlang-cookie\") pod \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.547111 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-plugins-conf\") pod \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.547159 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8b2vv\" (UniqueName: \"kubernetes.io/projected/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-kube-api-access-8b2vv\") pod \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.547231 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-rabbitmq-tls\") pod \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.547267 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-erlang-cookie-secret\") pod \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.547308 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-pod-info\") pod \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.547376 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-rabbitmq-confd\") pod \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.547438 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-rabbitmq-plugins\") pod \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.547482 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-config-data\") pod 
\"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.547584 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\" (UID: \"e8fc49c1-0820-4dcb-9a50-9d3504b768d9\") " Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.549128 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "e8fc49c1-0820-4dcb-9a50-9d3504b768d9" (UID: "e8fc49c1-0820-4dcb-9a50-9d3504b768d9"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.549650 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "e8fc49c1-0820-4dcb-9a50-9d3504b768d9" (UID: "e8fc49c1-0820-4dcb-9a50-9d3504b768d9"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.549708 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "e8fc49c1-0820-4dcb-9a50-9d3504b768d9" (UID: "e8fc49c1-0820-4dcb-9a50-9d3504b768d9"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.556055 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "e8fc49c1-0820-4dcb-9a50-9d3504b768d9" (UID: "e8fc49c1-0820-4dcb-9a50-9d3504b768d9"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.556277 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-pod-info" (OuterVolumeSpecName: "pod-info") pod "e8fc49c1-0820-4dcb-9a50-9d3504b768d9" (UID: "e8fc49c1-0820-4dcb-9a50-9d3504b768d9"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.556414 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "persistence") pod "e8fc49c1-0820-4dcb-9a50-9d3504b768d9" (UID: "e8fc49c1-0820-4dcb-9a50-9d3504b768d9"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.556715 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "e8fc49c1-0820-4dcb-9a50-9d3504b768d9" (UID: "e8fc49c1-0820-4dcb-9a50-9d3504b768d9"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.568159 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-kube-api-access-8b2vv" (OuterVolumeSpecName: "kube-api-access-8b2vv") pod "e8fc49c1-0820-4dcb-9a50-9d3504b768d9" (UID: "e8fc49c1-0820-4dcb-9a50-9d3504b768d9"). InnerVolumeSpecName "kube-api-access-8b2vv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.594222 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-config-data" (OuterVolumeSpecName: "config-data") pod "e8fc49c1-0820-4dcb-9a50-9d3504b768d9" (UID: "e8fc49c1-0820-4dcb-9a50-9d3504b768d9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.624684 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-server-conf" (OuterVolumeSpecName: "server-conf") pod "e8fc49c1-0820-4dcb-9a50-9d3504b768d9" (UID: "e8fc49c1-0820-4dcb-9a50-9d3504b768d9"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.649109 4840 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.649139 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.649164 4840 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.649173 4840 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-server-conf\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.649182 4840 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.649192 4840 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.649202 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8b2vv\" (UniqueName: \"kubernetes.io/projected/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-kube-api-access-8b2vv\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.649210 4840 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:55 crc 
kubenswrapper[4840]: I1205 15:21:55.649220 4840 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.649229 4840 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-pod-info\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.682918 4840 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.688498 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "e8fc49c1-0820-4dcb-9a50-9d3504b768d9" (UID: "e8fc49c1-0820-4dcb-9a50-9d3504b768d9"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.750533 4840 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e8fc49c1-0820-4dcb-9a50-9d3504b768d9-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:55 crc kubenswrapper[4840]: I1205 15:21:55.750596 4840 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.084282 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e8fc49c1-0820-4dcb-9a50-9d3504b768d9","Type":"ContainerDied","Data":"75cf0e11f87e9cb61b90271e5611fcf637b08e90fd20b5a8db321a142bbccd87"} Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.084686 4840 scope.go:117] "RemoveContainer" containerID="2c611d00f0b25d3869e1beb7eb13a1b7e50608dc6276ce0868249808e9d71ed1" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.084890 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.123551 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.128285 4840 scope.go:117] "RemoveContainer" containerID="7d8de606e132e801bf3051d60b8122b938e918f5ababd4e11fc0130a6cfaf2ca" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.132971 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.156300 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 15:21:56 crc kubenswrapper[4840]: E1205 15:21:56.156732 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8fc49c1-0820-4dcb-9a50-9d3504b768d9" containerName="setup-container" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.156751 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8fc49c1-0820-4dcb-9a50-9d3504b768d9" containerName="setup-container" Dec 05 15:21:56 crc kubenswrapper[4840]: E1205 15:21:56.156765 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8fc49c1-0820-4dcb-9a50-9d3504b768d9" containerName="rabbitmq" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.156774 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8fc49c1-0820-4dcb-9a50-9d3504b768d9" containerName="rabbitmq" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.157000 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8fc49c1-0820-4dcb-9a50-9d3504b768d9" containerName="rabbitmq" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.158233 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.205271 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.205399 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.205567 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.205616 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.205618 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.205667 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.206153 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-gwc9c" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.222372 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.359715 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.359803 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.359884 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.359920 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.359961 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.359991 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.360026 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-config-data\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.360060 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7sfcw\" (UniqueName: \"kubernetes.io/projected/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-kube-api-access-7sfcw\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.360097 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.360120 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.360178 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.462554 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.462632 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.462670 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.462696 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-plugins-conf\") pod \"rabbitmq-server-0\" (UID: 
\"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.462725 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-config-data\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.462745 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7sfcw\" (UniqueName: \"kubernetes.io/projected/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-kube-api-access-7sfcw\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.462774 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.462799 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.462820 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.462882 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.462925 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.463190 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.463268 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.463344 4840 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.464279 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-config-data\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.464375 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.466413 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.468306 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.468371 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.470795 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.472356 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.482275 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7sfcw\" (UniqueName: \"kubernetes.io/projected/4b53cdac-e8cf-4dc5-abed-0d20e7ca8140-kube-api-access-7sfcw\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.495373 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140\") " pod="openstack/rabbitmq-server-0" Dec 05 15:21:56 crc kubenswrapper[4840]: I1205 15:21:56.533788 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.101746 4840 generic.go:334] "Generic (PLEG): container finished" podID="f169c577-448f-45db-bcdd-f34f5c24e6bb" containerID="d69c4ae1302b8f1f898d03d8e3007d19a59c63e4e1e7dbbcdf22386585f688a8" exitCode=0 Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.101936 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f169c577-448f-45db-bcdd-f34f5c24e6bb","Type":"ContainerDied","Data":"d69c4ae1302b8f1f898d03d8e3007d19a59c63e4e1e7dbbcdf22386585f688a8"} Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.195821 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 15:21:57 crc kubenswrapper[4840]: W1205 15:21:57.202993 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4b53cdac_e8cf_4dc5_abed_0d20e7ca8140.slice/crio-ee4a2c6b8876402a433d40bbd24efbe9388704a6b4b3e2c9977c99fd4e1858c4 WatchSource:0}: Error finding container ee4a2c6b8876402a433d40bbd24efbe9388704a6b4b3e2c9977c99fd4e1858c4: Status 404 returned error can't find the container with id ee4a2c6b8876402a433d40bbd24efbe9388704a6b4b3e2c9977c99fd4e1858c4 Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.535302 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.715581 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f169c577-448f-45db-bcdd-f34f5c24e6bb-server-conf\") pod \"f169c577-448f-45db-bcdd-f34f5c24e6bb\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.715638 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f169c577-448f-45db-bcdd-f34f5c24e6bb-rabbitmq-tls\") pod \"f169c577-448f-45db-bcdd-f34f5c24e6bb\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.715676 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"f169c577-448f-45db-bcdd-f34f5c24e6bb\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.715741 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f169c577-448f-45db-bcdd-f34f5c24e6bb-rabbitmq-confd\") pod \"f169c577-448f-45db-bcdd-f34f5c24e6bb\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.715787 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f169c577-448f-45db-bcdd-f34f5c24e6bb-rabbitmq-plugins\") pod \"f169c577-448f-45db-bcdd-f34f5c24e6bb\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.715856 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f169c577-448f-45db-bcdd-f34f5c24e6bb-config-data\") pod \"f169c577-448f-45db-bcdd-f34f5c24e6bb\" (UID: 
\"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.715911 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f169c577-448f-45db-bcdd-f34f5c24e6bb-erlang-cookie-secret\") pod \"f169c577-448f-45db-bcdd-f34f5c24e6bb\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.715954 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f169c577-448f-45db-bcdd-f34f5c24e6bb-rabbitmq-erlang-cookie\") pod \"f169c577-448f-45db-bcdd-f34f5c24e6bb\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.715989 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f169c577-448f-45db-bcdd-f34f5c24e6bb-plugins-conf\") pod \"f169c577-448f-45db-bcdd-f34f5c24e6bb\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.716019 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f169c577-448f-45db-bcdd-f34f5c24e6bb-pod-info\") pod \"f169c577-448f-45db-bcdd-f34f5c24e6bb\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.716094 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t7jxj\" (UniqueName: \"kubernetes.io/projected/f169c577-448f-45db-bcdd-f34f5c24e6bb-kube-api-access-t7jxj\") pod \"f169c577-448f-45db-bcdd-f34f5c24e6bb\" (UID: \"f169c577-448f-45db-bcdd-f34f5c24e6bb\") " Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.716887 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f169c577-448f-45db-bcdd-f34f5c24e6bb-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "f169c577-448f-45db-bcdd-f34f5c24e6bb" (UID: "f169c577-448f-45db-bcdd-f34f5c24e6bb"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.717360 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f169c577-448f-45db-bcdd-f34f5c24e6bb-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "f169c577-448f-45db-bcdd-f34f5c24e6bb" (UID: "f169c577-448f-45db-bcdd-f34f5c24e6bb"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.717977 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f169c577-448f-45db-bcdd-f34f5c24e6bb-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "f169c577-448f-45db-bcdd-f34f5c24e6bb" (UID: "f169c577-448f-45db-bcdd-f34f5c24e6bb"). InnerVolumeSpecName "rabbitmq-erlang-cookie". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.719697 4840 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f169c577-448f-45db-bcdd-f34f5c24e6bb-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.720515 4840 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f169c577-448f-45db-bcdd-f34f5c24e6bb-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.720599 4840 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f169c577-448f-45db-bcdd-f34f5c24e6bb-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.721986 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f169c577-448f-45db-bcdd-f34f5c24e6bb-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "f169c577-448f-45db-bcdd-f34f5c24e6bb" (UID: "f169c577-448f-45db-bcdd-f34f5c24e6bb"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.722155 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "persistence") pod "f169c577-448f-45db-bcdd-f34f5c24e6bb" (UID: "f169c577-448f-45db-bcdd-f34f5c24e6bb"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.723191 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f169c577-448f-45db-bcdd-f34f5c24e6bb-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "f169c577-448f-45db-bcdd-f34f5c24e6bb" (UID: "f169c577-448f-45db-bcdd-f34f5c24e6bb"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.726771 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/f169c577-448f-45db-bcdd-f34f5c24e6bb-pod-info" (OuterVolumeSpecName: "pod-info") pod "f169c577-448f-45db-bcdd-f34f5c24e6bb" (UID: "f169c577-448f-45db-bcdd-f34f5c24e6bb"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.728573 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f169c577-448f-45db-bcdd-f34f5c24e6bb-kube-api-access-t7jxj" (OuterVolumeSpecName: "kube-api-access-t7jxj") pod "f169c577-448f-45db-bcdd-f34f5c24e6bb" (UID: "f169c577-448f-45db-bcdd-f34f5c24e6bb"). InnerVolumeSpecName "kube-api-access-t7jxj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.752094 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f169c577-448f-45db-bcdd-f34f5c24e6bb-config-data" (OuterVolumeSpecName: "config-data") pod "f169c577-448f-45db-bcdd-f34f5c24e6bb" (UID: "f169c577-448f-45db-bcdd-f34f5c24e6bb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.782598 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f169c577-448f-45db-bcdd-f34f5c24e6bb-server-conf" (OuterVolumeSpecName: "server-conf") pod "f169c577-448f-45db-bcdd-f34f5c24e6bb" (UID: "f169c577-448f-45db-bcdd-f34f5c24e6bb"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.823741 4840 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f169c577-448f-45db-bcdd-f34f5c24e6bb-pod-info\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.823780 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t7jxj\" (UniqueName: \"kubernetes.io/projected/f169c577-448f-45db-bcdd-f34f5c24e6bb-kube-api-access-t7jxj\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.823792 4840 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f169c577-448f-45db-bcdd-f34f5c24e6bb-server-conf\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.823803 4840 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f169c577-448f-45db-bcdd-f34f5c24e6bb-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.823841 4840 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.823853 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f169c577-448f-45db-bcdd-f34f5c24e6bb-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.823880 4840 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f169c577-448f-45db-bcdd-f34f5c24e6bb-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.846737 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f169c577-448f-45db-bcdd-f34f5c24e6bb-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "f169c577-448f-45db-bcdd-f34f5c24e6bb" (UID: "f169c577-448f-45db-bcdd-f34f5c24e6bb"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.854282 4840 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.925808 4840 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f169c577-448f-45db-bcdd-f34f5c24e6bb-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:57 crc kubenswrapper[4840]: I1205 15:21:57.925849 4840 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.029014 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-l8vz9" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.079574 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8fc49c1-0820-4dcb-9a50-9d3504b768d9" path="/var/lib/kubelet/pods/e8fc49c1-0820-4dcb-9a50-9d3504b768d9/volumes" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.080964 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-l8vz9" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.112617 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140","Type":"ContainerStarted","Data":"ee4a2c6b8876402a433d40bbd24efbe9388704a6b4b3e2c9977c99fd4e1858c4"} Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.115337 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f169c577-448f-45db-bcdd-f34f5c24e6bb","Type":"ContainerDied","Data":"74c93ee1ddd798004ea65f0b10f368feb2fc5b9e1620af9c0ff331d6b4e36000"} Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.115693 4840 scope.go:117] "RemoveContainer" containerID="d69c4ae1302b8f1f898d03d8e3007d19a59c63e4e1e7dbbcdf22386585f688a8" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.115374 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.141334 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.149622 4840 scope.go:117] "RemoveContainer" containerID="a44027a9b622191feea57e10e032a53de96578f30e052b68af4d233fb01ec896" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.178763 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.210468 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 15:21:58 crc kubenswrapper[4840]: E1205 15:21:58.211202 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f169c577-448f-45db-bcdd-f34f5c24e6bb" containerName="setup-container" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.211223 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="f169c577-448f-45db-bcdd-f34f5c24e6bb" containerName="setup-container" Dec 05 15:21:58 crc kubenswrapper[4840]: E1205 15:21:58.211242 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f169c577-448f-45db-bcdd-f34f5c24e6bb" containerName="rabbitmq" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.211249 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="f169c577-448f-45db-bcdd-f34f5c24e6bb" containerName="rabbitmq" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.212104 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="f169c577-448f-45db-bcdd-f34f5c24e6bb" containerName="rabbitmq" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.214156 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.216759 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.217404 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.217845 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.218267 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-gf9wj" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.218382 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.218661 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.220147 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.229926 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.242141 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c0b4037e-9bd6-4a53-84b3-941d72023ce3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.242185 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9ppz\" (UniqueName: \"kubernetes.io/projected/c0b4037e-9bd6-4a53-84b3-941d72023ce3-kube-api-access-z9ppz\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.242273 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c0b4037e-9bd6-4a53-84b3-941d72023ce3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.242296 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c0b4037e-9bd6-4a53-84b3-941d72023ce3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.242317 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c0b4037e-9bd6-4a53-84b3-941d72023ce3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.242479 4840 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c0b4037e-9bd6-4a53-84b3-941d72023ce3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.242559 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c0b4037e-9bd6-4a53-84b3-941d72023ce3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.242606 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c0b4037e-9bd6-4a53-84b3-941d72023ce3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.242626 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c0b4037e-9bd6-4a53-84b3-941d72023ce3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.242796 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c0b4037e-9bd6-4a53-84b3-941d72023ce3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.242919 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.268491 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l8vz9"] Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.347482 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c0b4037e-9bd6-4a53-84b3-941d72023ce3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.347562 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9ppz\" (UniqueName: \"kubernetes.io/projected/c0b4037e-9bd6-4a53-84b3-941d72023ce3-kube-api-access-z9ppz\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.347676 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c0b4037e-9bd6-4a53-84b3-941d72023ce3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.347711 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c0b4037e-9bd6-4a53-84b3-941d72023ce3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.347754 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c0b4037e-9bd6-4a53-84b3-941d72023ce3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.347792 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c0b4037e-9bd6-4a53-84b3-941d72023ce3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.347883 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c0b4037e-9bd6-4a53-84b3-941d72023ce3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.347926 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c0b4037e-9bd6-4a53-84b3-941d72023ce3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.347958 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c0b4037e-9bd6-4a53-84b3-941d72023ce3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.348029 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c0b4037e-9bd6-4a53-84b3-941d72023ce3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.348080 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.348502 4840 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.353987 4840 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c0b4037e-9bd6-4a53-84b3-941d72023ce3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.354377 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c0b4037e-9bd6-4a53-84b3-941d72023ce3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.356447 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c0b4037e-9bd6-4a53-84b3-941d72023ce3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.356970 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c0b4037e-9bd6-4a53-84b3-941d72023ce3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.357061 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c0b4037e-9bd6-4a53-84b3-941d72023ce3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.358732 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c0b4037e-9bd6-4a53-84b3-941d72023ce3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.358972 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c0b4037e-9bd6-4a53-84b3-941d72023ce3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.359611 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c0b4037e-9bd6-4a53-84b3-941d72023ce3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.371152 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c0b4037e-9bd6-4a53-84b3-941d72023ce3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.413698 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.460372 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9ppz\" (UniqueName: \"kubernetes.io/projected/c0b4037e-9bd6-4a53-84b3-941d72023ce3-kube-api-access-z9ppz\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0b4037e-9bd6-4a53-84b3-941d72023ce3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:58 crc kubenswrapper[4840]: I1205 15:21:58.548192 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.127049 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140","Type":"ContainerStarted","Data":"4fb3bc939769691873e133cf0da41478d2517abc982418359b6bedaabafe640b"} Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.129119 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-l8vz9" podUID="1c1a5e9e-4618-4e48-beb7-16857a715739" containerName="registry-server" containerID="cri-o://66dd4aea91f8b82dac5ee1226c0b06ccf0a81edf1d1831042c97b57c1a1555bf" gracePeriod=2 Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.153665 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-q464m"] Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.155511 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.161236 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.188710 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-q464m"] Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.268559 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcgk2\" (UniqueName: \"kubernetes.io/projected/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-kube-api-access-gcgk2\") pod \"dnsmasq-dns-5576978c7c-q464m\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.268678 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-dns-swift-storage-0\") pod \"dnsmasq-dns-5576978c7c-q464m\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.268758 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-dns-svc\") pod \"dnsmasq-dns-5576978c7c-q464m\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.268845 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-ovsdbserver-sb\") pod \"dnsmasq-dns-5576978c7c-q464m\" (UID: 
\"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.268878 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-openstack-edpm-ipam\") pod \"dnsmasq-dns-5576978c7c-q464m\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.268933 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-config\") pod \"dnsmasq-dns-5576978c7c-q464m\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.268963 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-ovsdbserver-nb\") pod \"dnsmasq-dns-5576978c7c-q464m\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.370459 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-dns-svc\") pod \"dnsmasq-dns-5576978c7c-q464m\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.370523 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-ovsdbserver-sb\") pod \"dnsmasq-dns-5576978c7c-q464m\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.370542 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-openstack-edpm-ipam\") pod \"dnsmasq-dns-5576978c7c-q464m\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.370575 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-config\") pod \"dnsmasq-dns-5576978c7c-q464m\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.370605 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-ovsdbserver-nb\") pod \"dnsmasq-dns-5576978c7c-q464m\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.370653 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcgk2\" (UniqueName: \"kubernetes.io/projected/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-kube-api-access-gcgk2\") pod \"dnsmasq-dns-5576978c7c-q464m\" (UID: 
\"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.370701 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-dns-swift-storage-0\") pod \"dnsmasq-dns-5576978c7c-q464m\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.371586 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-dns-swift-storage-0\") pod \"dnsmasq-dns-5576978c7c-q464m\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.372127 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-dns-svc\") pod \"dnsmasq-dns-5576978c7c-q464m\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.372954 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-config\") pod \"dnsmasq-dns-5576978c7c-q464m\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.373190 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-openstack-edpm-ipam\") pod \"dnsmasq-dns-5576978c7c-q464m\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.373197 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-ovsdbserver-sb\") pod \"dnsmasq-dns-5576978c7c-q464m\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.373193 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-ovsdbserver-nb\") pod \"dnsmasq-dns-5576978c7c-q464m\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.400658 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcgk2\" (UniqueName: \"kubernetes.io/projected/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-kube-api-access-gcgk2\") pod \"dnsmasq-dns-5576978c7c-q464m\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.488417 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.683886 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-l8vz9" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.755167 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 15:21:59 crc kubenswrapper[4840]: W1205 15:21:59.763260 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0b4037e_9bd6_4a53_84b3_941d72023ce3.slice/crio-4a8fa1406a12275794424d7aea4c94ab49622a5eddbf7de96b4f96794de213d7 WatchSource:0}: Error finding container 4a8fa1406a12275794424d7aea4c94ab49622a5eddbf7de96b4f96794de213d7: Status 404 returned error can't find the container with id 4a8fa1406a12275794424d7aea4c94ab49622a5eddbf7de96b4f96794de213d7 Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.777293 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c1a5e9e-4618-4e48-beb7-16857a715739-utilities\") pod \"1c1a5e9e-4618-4e48-beb7-16857a715739\" (UID: \"1c1a5e9e-4618-4e48-beb7-16857a715739\") " Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.777420 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r2w4f\" (UniqueName: \"kubernetes.io/projected/1c1a5e9e-4618-4e48-beb7-16857a715739-kube-api-access-r2w4f\") pod \"1c1a5e9e-4618-4e48-beb7-16857a715739\" (UID: \"1c1a5e9e-4618-4e48-beb7-16857a715739\") " Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.777492 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c1a5e9e-4618-4e48-beb7-16857a715739-catalog-content\") pod \"1c1a5e9e-4618-4e48-beb7-16857a715739\" (UID: \"1c1a5e9e-4618-4e48-beb7-16857a715739\") " Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.778555 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c1a5e9e-4618-4e48-beb7-16857a715739-utilities" (OuterVolumeSpecName: "utilities") pod "1c1a5e9e-4618-4e48-beb7-16857a715739" (UID: "1c1a5e9e-4618-4e48-beb7-16857a715739"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.784174 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c1a5e9e-4618-4e48-beb7-16857a715739-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.806123 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c1a5e9e-4618-4e48-beb7-16857a715739-kube-api-access-r2w4f" (OuterVolumeSpecName: "kube-api-access-r2w4f") pod "1c1a5e9e-4618-4e48-beb7-16857a715739" (UID: "1c1a5e9e-4618-4e48-beb7-16857a715739"). InnerVolumeSpecName "kube-api-access-r2w4f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.886666 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r2w4f\" (UniqueName: \"kubernetes.io/projected/1c1a5e9e-4618-4e48-beb7-16857a715739-kube-api-access-r2w4f\") on node \"crc\" DevicePath \"\"" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.891837 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c1a5e9e-4618-4e48-beb7-16857a715739-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1c1a5e9e-4618-4e48-beb7-16857a715739" (UID: "1c1a5e9e-4618-4e48-beb7-16857a715739"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:21:59 crc kubenswrapper[4840]: I1205 15:21:59.988548 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c1a5e9e-4618-4e48-beb7-16857a715739-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 15:22:00 crc kubenswrapper[4840]: I1205 15:22:00.010998 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-q464m"] Dec 05 15:22:00 crc kubenswrapper[4840]: W1205 15:22:00.011401 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9f3f66e4_fc7c_464c_9fa3_ec2fb6d2054e.slice/crio-b432cfd3316d61c5e82be40e9369e392bed9310d2607ed548abffd8abdd12a53 WatchSource:0}: Error finding container b432cfd3316d61c5e82be40e9369e392bed9310d2607ed548abffd8abdd12a53: Status 404 returned error can't find the container with id b432cfd3316d61c5e82be40e9369e392bed9310d2607ed548abffd8abdd12a53 Dec 05 15:22:00 crc kubenswrapper[4840]: I1205 15:22:00.081262 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f169c577-448f-45db-bcdd-f34f5c24e6bb" path="/var/lib/kubelet/pods/f169c577-448f-45db-bcdd-f34f5c24e6bb/volumes" Dec 05 15:22:00 crc kubenswrapper[4840]: I1205 15:22:00.141621 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c0b4037e-9bd6-4a53-84b3-941d72023ce3","Type":"ContainerStarted","Data":"4a8fa1406a12275794424d7aea4c94ab49622a5eddbf7de96b4f96794de213d7"} Dec 05 15:22:00 crc kubenswrapper[4840]: I1205 15:22:00.143402 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-q464m" event={"ID":"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e","Type":"ContainerStarted","Data":"b432cfd3316d61c5e82be40e9369e392bed9310d2607ed548abffd8abdd12a53"} Dec 05 15:22:00 crc kubenswrapper[4840]: I1205 15:22:00.146875 4840 generic.go:334] "Generic (PLEG): container finished" podID="1c1a5e9e-4618-4e48-beb7-16857a715739" containerID="66dd4aea91f8b82dac5ee1226c0b06ccf0a81edf1d1831042c97b57c1a1555bf" exitCode=0 Dec 05 15:22:00 crc kubenswrapper[4840]: I1205 15:22:00.146895 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l8vz9" event={"ID":"1c1a5e9e-4618-4e48-beb7-16857a715739","Type":"ContainerDied","Data":"66dd4aea91f8b82dac5ee1226c0b06ccf0a81edf1d1831042c97b57c1a1555bf"} Dec 05 15:22:00 crc kubenswrapper[4840]: I1205 15:22:00.146933 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l8vz9" event={"ID":"1c1a5e9e-4618-4e48-beb7-16857a715739","Type":"ContainerDied","Data":"b306785a235291488fcb5569ec3020945ab6e724c9f1804e0aa22d0b18cd4571"} Dec 05 15:22:00 crc 
kubenswrapper[4840]: I1205 15:22:00.146957 4840 scope.go:117] "RemoveContainer" containerID="66dd4aea91f8b82dac5ee1226c0b06ccf0a81edf1d1831042c97b57c1a1555bf" Dec 05 15:22:00 crc kubenswrapper[4840]: I1205 15:22:00.146969 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l8vz9" Dec 05 15:22:00 crc kubenswrapper[4840]: I1205 15:22:00.196510 4840 scope.go:117] "RemoveContainer" containerID="39a17840eb0d87c338ee1a5cb3b2843936efcccf21bdfe67674b3e42936c9651" Dec 05 15:22:00 crc kubenswrapper[4840]: I1205 15:22:00.232925 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l8vz9"] Dec 05 15:22:00 crc kubenswrapper[4840]: I1205 15:22:00.235749 4840 scope.go:117] "RemoveContainer" containerID="102c0041971fdce94370260fe4b18299860c0846ead1835da675010868bc5fc5" Dec 05 15:22:00 crc kubenswrapper[4840]: I1205 15:22:00.241276 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-l8vz9"] Dec 05 15:22:00 crc kubenswrapper[4840]: I1205 15:22:00.293317 4840 scope.go:117] "RemoveContainer" containerID="66dd4aea91f8b82dac5ee1226c0b06ccf0a81edf1d1831042c97b57c1a1555bf" Dec 05 15:22:00 crc kubenswrapper[4840]: E1205 15:22:00.295236 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66dd4aea91f8b82dac5ee1226c0b06ccf0a81edf1d1831042c97b57c1a1555bf\": container with ID starting with 66dd4aea91f8b82dac5ee1226c0b06ccf0a81edf1d1831042c97b57c1a1555bf not found: ID does not exist" containerID="66dd4aea91f8b82dac5ee1226c0b06ccf0a81edf1d1831042c97b57c1a1555bf" Dec 05 15:22:00 crc kubenswrapper[4840]: I1205 15:22:00.295289 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66dd4aea91f8b82dac5ee1226c0b06ccf0a81edf1d1831042c97b57c1a1555bf"} err="failed to get container status \"66dd4aea91f8b82dac5ee1226c0b06ccf0a81edf1d1831042c97b57c1a1555bf\": rpc error: code = NotFound desc = could not find container \"66dd4aea91f8b82dac5ee1226c0b06ccf0a81edf1d1831042c97b57c1a1555bf\": container with ID starting with 66dd4aea91f8b82dac5ee1226c0b06ccf0a81edf1d1831042c97b57c1a1555bf not found: ID does not exist" Dec 05 15:22:00 crc kubenswrapper[4840]: I1205 15:22:00.295320 4840 scope.go:117] "RemoveContainer" containerID="39a17840eb0d87c338ee1a5cb3b2843936efcccf21bdfe67674b3e42936c9651" Dec 05 15:22:00 crc kubenswrapper[4840]: E1205 15:22:00.295623 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39a17840eb0d87c338ee1a5cb3b2843936efcccf21bdfe67674b3e42936c9651\": container with ID starting with 39a17840eb0d87c338ee1a5cb3b2843936efcccf21bdfe67674b3e42936c9651 not found: ID does not exist" containerID="39a17840eb0d87c338ee1a5cb3b2843936efcccf21bdfe67674b3e42936c9651" Dec 05 15:22:00 crc kubenswrapper[4840]: I1205 15:22:00.295664 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39a17840eb0d87c338ee1a5cb3b2843936efcccf21bdfe67674b3e42936c9651"} err="failed to get container status \"39a17840eb0d87c338ee1a5cb3b2843936efcccf21bdfe67674b3e42936c9651\": rpc error: code = NotFound desc = could not find container \"39a17840eb0d87c338ee1a5cb3b2843936efcccf21bdfe67674b3e42936c9651\": container with ID starting with 39a17840eb0d87c338ee1a5cb3b2843936efcccf21bdfe67674b3e42936c9651 not found: ID does not exist" Dec 05 15:22:00 crc 
kubenswrapper[4840]: I1205 15:22:00.295691 4840 scope.go:117] "RemoveContainer" containerID="102c0041971fdce94370260fe4b18299860c0846ead1835da675010868bc5fc5" Dec 05 15:22:00 crc kubenswrapper[4840]: E1205 15:22:00.296062 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"102c0041971fdce94370260fe4b18299860c0846ead1835da675010868bc5fc5\": container with ID starting with 102c0041971fdce94370260fe4b18299860c0846ead1835da675010868bc5fc5 not found: ID does not exist" containerID="102c0041971fdce94370260fe4b18299860c0846ead1835da675010868bc5fc5" Dec 05 15:22:00 crc kubenswrapper[4840]: I1205 15:22:00.296101 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"102c0041971fdce94370260fe4b18299860c0846ead1835da675010868bc5fc5"} err="failed to get container status \"102c0041971fdce94370260fe4b18299860c0846ead1835da675010868bc5fc5\": rpc error: code = NotFound desc = could not find container \"102c0041971fdce94370260fe4b18299860c0846ead1835da675010868bc5fc5\": container with ID starting with 102c0041971fdce94370260fe4b18299860c0846ead1835da675010868bc5fc5 not found: ID does not exist" Dec 05 15:22:01 crc kubenswrapper[4840]: I1205 15:22:01.155845 4840 generic.go:334] "Generic (PLEG): container finished" podID="9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e" containerID="a002d338a42aa2b5a5278756e5ab313d12341e456845c71910b243ac455d29bd" exitCode=0 Dec 05 15:22:01 crc kubenswrapper[4840]: I1205 15:22:01.156168 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-q464m" event={"ID":"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e","Type":"ContainerDied","Data":"a002d338a42aa2b5a5278756e5ab313d12341e456845c71910b243ac455d29bd"} Dec 05 15:22:02 crc kubenswrapper[4840]: I1205 15:22:02.082143 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c1a5e9e-4618-4e48-beb7-16857a715739" path="/var/lib/kubelet/pods/1c1a5e9e-4618-4e48-beb7-16857a715739/volumes" Dec 05 15:22:02 crc kubenswrapper[4840]: I1205 15:22:02.169023 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c0b4037e-9bd6-4a53-84b3-941d72023ce3","Type":"ContainerStarted","Data":"529bc60e2df42794f81ed1423725fa3f255ac0e8a7e371363ef440e190806b7f"} Dec 05 15:22:02 crc kubenswrapper[4840]: I1205 15:22:02.171793 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-q464m" event={"ID":"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e","Type":"ContainerStarted","Data":"1e12e4e3e7a0cf197d0703a03f4d908131d6643b52487440bccb212292d379c4"} Dec 05 15:22:02 crc kubenswrapper[4840]: I1205 15:22:02.172238 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:22:02 crc kubenswrapper[4840]: I1205 15:22:02.213531 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5576978c7c-q464m" podStartSLOduration=3.213510499 podStartE2EDuration="3.213510499s" podCreationTimestamp="2025-12-05 15:21:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:22:02.212919172 +0000 UTC m=+1400.553981816" watchObservedRunningTime="2025-12-05 15:22:02.213510499 +0000 UTC m=+1400.554573113" Dec 05 15:22:02 crc kubenswrapper[4840]: I1205 15:22:02.447057 4840 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openstack/rabbitmq-cell1-server-0" podUID="f169c577-448f-45db-bcdd-f34f5c24e6bb" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.100:5671: i/o timeout" Dec 05 15:22:09 crc kubenswrapper[4840]: I1205 15:22:09.491818 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:22:09 crc kubenswrapper[4840]: I1205 15:22:09.616101 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-jq7m4"] Dec 05 15:22:09 crc kubenswrapper[4840]: I1205 15:22:09.616332 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" podUID="7c96306d-162b-44be-8dfb-fea1280e5644" containerName="dnsmasq-dns" containerID="cri-o://0f7213ca076b969b3fc05dc52b961da27bf62ba6f9355abc17636d312dadc0d6" gracePeriod=10 Dec 05 15:22:09 crc kubenswrapper[4840]: I1205 15:22:09.713914 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8c6f6df99-dkpw9"] Dec 05 15:22:09 crc kubenswrapper[4840]: E1205 15:22:09.714339 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c1a5e9e-4618-4e48-beb7-16857a715739" containerName="extract-utilities" Dec 05 15:22:09 crc kubenswrapper[4840]: I1205 15:22:09.714354 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c1a5e9e-4618-4e48-beb7-16857a715739" containerName="extract-utilities" Dec 05 15:22:09 crc kubenswrapper[4840]: E1205 15:22:09.714373 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c1a5e9e-4618-4e48-beb7-16857a715739" containerName="extract-content" Dec 05 15:22:09 crc kubenswrapper[4840]: I1205 15:22:09.714379 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c1a5e9e-4618-4e48-beb7-16857a715739" containerName="extract-content" Dec 05 15:22:09 crc kubenswrapper[4840]: E1205 15:22:09.714412 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c1a5e9e-4618-4e48-beb7-16857a715739" containerName="registry-server" Dec 05 15:22:09 crc kubenswrapper[4840]: I1205 15:22:09.714419 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c1a5e9e-4618-4e48-beb7-16857a715739" containerName="registry-server" Dec 05 15:22:09 crc kubenswrapper[4840]: I1205 15:22:09.714630 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c1a5e9e-4618-4e48-beb7-16857a715739" containerName="registry-server" Dec 05 15:22:09 crc kubenswrapper[4840]: I1205 15:22:09.715710 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:09 crc kubenswrapper[4840]: I1205 15:22:09.726558 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8c6f6df99-dkpw9"] Dec 05 15:22:09 crc kubenswrapper[4840]: I1205 15:22:09.913710 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe-ovsdbserver-nb\") pod \"dnsmasq-dns-8c6f6df99-dkpw9\" (UID: \"0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe\") " pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:09 crc kubenswrapper[4840]: I1205 15:22:09.914993 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe-openstack-edpm-ipam\") pod \"dnsmasq-dns-8c6f6df99-dkpw9\" (UID: \"0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe\") " pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:09 crc kubenswrapper[4840]: I1205 15:22:09.915196 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe-dns-svc\") pod \"dnsmasq-dns-8c6f6df99-dkpw9\" (UID: \"0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe\") " pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:09 crc kubenswrapper[4840]: I1205 15:22:09.915276 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gk7pv\" (UniqueName: \"kubernetes.io/projected/0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe-kube-api-access-gk7pv\") pod \"dnsmasq-dns-8c6f6df99-dkpw9\" (UID: \"0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe\") " pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:09 crc kubenswrapper[4840]: I1205 15:22:09.915394 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe-ovsdbserver-sb\") pod \"dnsmasq-dns-8c6f6df99-dkpw9\" (UID: \"0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe\") " pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:09 crc kubenswrapper[4840]: I1205 15:22:09.915483 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe-dns-swift-storage-0\") pod \"dnsmasq-dns-8c6f6df99-dkpw9\" (UID: \"0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe\") " pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:09 crc kubenswrapper[4840]: I1205 15:22:09.915592 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe-config\") pod \"dnsmasq-dns-8c6f6df99-dkpw9\" (UID: \"0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe\") " pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.017168 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe-ovsdbserver-nb\") pod \"dnsmasq-dns-8c6f6df99-dkpw9\" (UID: \"0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe\") " pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.017267 4840 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe-openstack-edpm-ipam\") pod \"dnsmasq-dns-8c6f6df99-dkpw9\" (UID: \"0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe\") " pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.017338 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe-dns-svc\") pod \"dnsmasq-dns-8c6f6df99-dkpw9\" (UID: \"0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe\") " pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.017378 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gk7pv\" (UniqueName: \"kubernetes.io/projected/0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe-kube-api-access-gk7pv\") pod \"dnsmasq-dns-8c6f6df99-dkpw9\" (UID: \"0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe\") " pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.017428 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe-ovsdbserver-sb\") pod \"dnsmasq-dns-8c6f6df99-dkpw9\" (UID: \"0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe\") " pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.017455 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe-dns-swift-storage-0\") pod \"dnsmasq-dns-8c6f6df99-dkpw9\" (UID: \"0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe\") " pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.017515 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe-config\") pod \"dnsmasq-dns-8c6f6df99-dkpw9\" (UID: \"0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe\") " pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.018218 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe-ovsdbserver-nb\") pod \"dnsmasq-dns-8c6f6df99-dkpw9\" (UID: \"0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe\") " pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.018323 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe-config\") pod \"dnsmasq-dns-8c6f6df99-dkpw9\" (UID: \"0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe\") " pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.018321 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe-openstack-edpm-ipam\") pod \"dnsmasq-dns-8c6f6df99-dkpw9\" (UID: \"0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe\") " pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.018658 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe-dns-svc\") pod \"dnsmasq-dns-8c6f6df99-dkpw9\" (UID: \"0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe\") " pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.018830 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe-ovsdbserver-sb\") pod \"dnsmasq-dns-8c6f6df99-dkpw9\" (UID: \"0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe\") " pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.018955 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe-dns-swift-storage-0\") pod \"dnsmasq-dns-8c6f6df99-dkpw9\" (UID: \"0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe\") " pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.040972 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gk7pv\" (UniqueName: \"kubernetes.io/projected/0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe-kube-api-access-gk7pv\") pod \"dnsmasq-dns-8c6f6df99-dkpw9\" (UID: \"0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe\") " pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.122712 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.256997 4840 generic.go:334] "Generic (PLEG): container finished" podID="7c96306d-162b-44be-8dfb-fea1280e5644" containerID="0f7213ca076b969b3fc05dc52b961da27bf62ba6f9355abc17636d312dadc0d6" exitCode=0 Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.257192 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" event={"ID":"7c96306d-162b-44be-8dfb-fea1280e5644","Type":"ContainerDied","Data":"0f7213ca076b969b3fc05dc52b961da27bf62ba6f9355abc17636d312dadc0d6"} Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.257356 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" event={"ID":"7c96306d-162b-44be-8dfb-fea1280e5644","Type":"ContainerDied","Data":"732a72316b87c5bd8eb38413a3e0a233d5001e668ab247bb5c9283f69441cdec"} Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.257389 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="732a72316b87c5bd8eb38413a3e0a233d5001e668ab247bb5c9283f69441cdec" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.276180 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.338808 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-dns-swift-storage-0\") pod \"7c96306d-162b-44be-8dfb-fea1280e5644\" (UID: \"7c96306d-162b-44be-8dfb-fea1280e5644\") " Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.338954 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-ovsdbserver-sb\") pod \"7c96306d-162b-44be-8dfb-fea1280e5644\" (UID: \"7c96306d-162b-44be-8dfb-fea1280e5644\") " Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.338990 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-config\") pod \"7c96306d-162b-44be-8dfb-fea1280e5644\" (UID: \"7c96306d-162b-44be-8dfb-fea1280e5644\") " Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.339031 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-ovsdbserver-nb\") pod \"7c96306d-162b-44be-8dfb-fea1280e5644\" (UID: \"7c96306d-162b-44be-8dfb-fea1280e5644\") " Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.339088 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-dns-svc\") pod \"7c96306d-162b-44be-8dfb-fea1280e5644\" (UID: \"7c96306d-162b-44be-8dfb-fea1280e5644\") " Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.339177 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gkqq8\" (UniqueName: \"kubernetes.io/projected/7c96306d-162b-44be-8dfb-fea1280e5644-kube-api-access-gkqq8\") pod \"7c96306d-162b-44be-8dfb-fea1280e5644\" (UID: \"7c96306d-162b-44be-8dfb-fea1280e5644\") " Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.372033 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c96306d-162b-44be-8dfb-fea1280e5644-kube-api-access-gkqq8" (OuterVolumeSpecName: "kube-api-access-gkqq8") pod "7c96306d-162b-44be-8dfb-fea1280e5644" (UID: "7c96306d-162b-44be-8dfb-fea1280e5644"). InnerVolumeSpecName "kube-api-access-gkqq8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.403341 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7c96306d-162b-44be-8dfb-fea1280e5644" (UID: "7c96306d-162b-44be-8dfb-fea1280e5644"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.404946 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-config" (OuterVolumeSpecName: "config") pod "7c96306d-162b-44be-8dfb-fea1280e5644" (UID: "7c96306d-162b-44be-8dfb-fea1280e5644"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.429448 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7c96306d-162b-44be-8dfb-fea1280e5644" (UID: "7c96306d-162b-44be-8dfb-fea1280e5644"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.446536 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7c96306d-162b-44be-8dfb-fea1280e5644" (UID: "7c96306d-162b-44be-8dfb-fea1280e5644"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.448355 4840 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.448391 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gkqq8\" (UniqueName: \"kubernetes.io/projected/7c96306d-162b-44be-8dfb-fea1280e5644-kube-api-access-gkqq8\") on node \"crc\" DevicePath \"\"" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.448406 4840 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.448417 4840 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.448430 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-config\") on node \"crc\" DevicePath \"\"" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.494741 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7c96306d-162b-44be-8dfb-fea1280e5644" (UID: "7c96306d-162b-44be-8dfb-fea1280e5644"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.549766 4840 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c96306d-162b-44be-8dfb-fea1280e5644-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 15:22:10 crc kubenswrapper[4840]: I1205 15:22:10.633238 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8c6f6df99-dkpw9"] Dec 05 15:22:10 crc kubenswrapper[4840]: W1205 15:22:10.633973 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0594160f_f4bb_40c3_b7e0_9dc2bb2cf1fe.slice/crio-972e18c68da63dd1b0ea27db4b1d1afaa775bfee231284d04e741ac2f4c310b2 WatchSource:0}: Error finding container 972e18c68da63dd1b0ea27db4b1d1afaa775bfee231284d04e741ac2f4c310b2: Status 404 returned error can't find the container with id 972e18c68da63dd1b0ea27db4b1d1afaa775bfee231284d04e741ac2f4c310b2 Dec 05 15:22:11 crc kubenswrapper[4840]: I1205 15:22:11.269236 4840 generic.go:334] "Generic (PLEG): container finished" podID="0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe" containerID="83cd78dce4d5abb390f2a24275f239ed0a57d96698a788a152833271545e3ce8" exitCode=0 Dec 05 15:22:11 crc kubenswrapper[4840]: I1205 15:22:11.269289 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" event={"ID":"0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe","Type":"ContainerDied","Data":"83cd78dce4d5abb390f2a24275f239ed0a57d96698a788a152833271545e3ce8"} Dec 05 15:22:11 crc kubenswrapper[4840]: I1205 15:22:11.269595 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-jq7m4" Dec 05 15:22:11 crc kubenswrapper[4840]: I1205 15:22:11.269602 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" event={"ID":"0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe","Type":"ContainerStarted","Data":"972e18c68da63dd1b0ea27db4b1d1afaa775bfee231284d04e741ac2f4c310b2"} Dec 05 15:22:11 crc kubenswrapper[4840]: I1205 15:22:11.462995 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-jq7m4"] Dec 05 15:22:11 crc kubenswrapper[4840]: I1205 15:22:11.473460 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-jq7m4"] Dec 05 15:22:12 crc kubenswrapper[4840]: I1205 15:22:12.100490 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c96306d-162b-44be-8dfb-fea1280e5644" path="/var/lib/kubelet/pods/7c96306d-162b-44be-8dfb-fea1280e5644/volumes" Dec 05 15:22:12 crc kubenswrapper[4840]: I1205 15:22:12.280502 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" event={"ID":"0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe","Type":"ContainerStarted","Data":"0e7fe53be80042e35405537c482d302fbf42f8ad2d6d2532cc2abee59a4455d6"} Dec 05 15:22:12 crc kubenswrapper[4840]: I1205 15:22:12.280947 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:12 crc kubenswrapper[4840]: I1205 15:22:12.302334 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" podStartSLOduration=3.302316378 podStartE2EDuration="3.302316378s" podCreationTimestamp="2025-12-05 15:22:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 
UTC" observedRunningTime="2025-12-05 15:22:12.30167149 +0000 UTC m=+1410.642734104" watchObservedRunningTime="2025-12-05 15:22:12.302316378 +0000 UTC m=+1410.643378992" Dec 05 15:22:15 crc kubenswrapper[4840]: I1205 15:22:15.919637 4840 scope.go:117] "RemoveContainer" containerID="885a721a096d0a6f16b2f979ede5a75c2dc24d24ea9a4584870baa4d5c441614" Dec 05 15:22:15 crc kubenswrapper[4840]: I1205 15:22:15.967332 4840 scope.go:117] "RemoveContainer" containerID="d395a6914261b636480321de2187c5b4351f05177d71b3b9f45f4d80de02939f" Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.126087 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8c6f6df99-dkpw9" Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.192098 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-q464m"] Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.192451 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5576978c7c-q464m" podUID="9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e" containerName="dnsmasq-dns" containerID="cri-o://1e12e4e3e7a0cf197d0703a03f4d908131d6643b52487440bccb212292d379c4" gracePeriod=10 Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.362536 4840 generic.go:334] "Generic (PLEG): container finished" podID="9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e" containerID="1e12e4e3e7a0cf197d0703a03f4d908131d6643b52487440bccb212292d379c4" exitCode=0 Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.362579 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-q464m" event={"ID":"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e","Type":"ContainerDied","Data":"1e12e4e3e7a0cf197d0703a03f4d908131d6643b52487440bccb212292d379c4"} Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.675971 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.774729 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-ovsdbserver-sb\") pod \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.774814 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-dns-svc\") pod \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.774854 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-ovsdbserver-nb\") pod \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.774892 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-dns-swift-storage-0\") pod \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.774966 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gcgk2\" (UniqueName: \"kubernetes.io/projected/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-kube-api-access-gcgk2\") pod \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.774988 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-openstack-edpm-ipam\") pod \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.775043 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-config\") pod \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\" (UID: \"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e\") " Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.806007 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-kube-api-access-gcgk2" (OuterVolumeSpecName: "kube-api-access-gcgk2") pod "9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e" (UID: "9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e"). InnerVolumeSpecName "kube-api-access-gcgk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.843694 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e" (UID: "9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e"). InnerVolumeSpecName "openstack-edpm-ipam". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.860917 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e" (UID: "9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.877010 4840 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.877179 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gcgk2\" (UniqueName: \"kubernetes.io/projected/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-kube-api-access-gcgk2\") on node \"crc\" DevicePath \"\"" Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.877192 4840 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.880258 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e" (UID: "9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.883140 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-config" (OuterVolumeSpecName: "config") pod "9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e" (UID: "9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.883335 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e" (UID: "9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.898311 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e" (UID: "9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.978640 4840 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.978682 4840 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.978695 4840 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 15:22:20 crc kubenswrapper[4840]: I1205 15:22:20.978705 4840 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e-config\") on node \"crc\" DevicePath \"\"" Dec 05 15:22:21 crc kubenswrapper[4840]: I1205 15:22:21.373728 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-q464m" event={"ID":"9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e","Type":"ContainerDied","Data":"b432cfd3316d61c5e82be40e9369e392bed9310d2607ed548abffd8abdd12a53"} Dec 05 15:22:21 crc kubenswrapper[4840]: I1205 15:22:21.373780 4840 scope.go:117] "RemoveContainer" containerID="1e12e4e3e7a0cf197d0703a03f4d908131d6643b52487440bccb212292d379c4" Dec 05 15:22:21 crc kubenswrapper[4840]: I1205 15:22:21.373920 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-q464m" Dec 05 15:22:21 crc kubenswrapper[4840]: I1205 15:22:21.418945 4840 scope.go:117] "RemoveContainer" containerID="a002d338a42aa2b5a5278756e5ab313d12341e456845c71910b243ac455d29bd" Dec 05 15:22:21 crc kubenswrapper[4840]: I1205 15:22:21.421105 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-q464m"] Dec 05 15:22:21 crc kubenswrapper[4840]: I1205 15:22:21.436799 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-q464m"] Dec 05 15:22:22 crc kubenswrapper[4840]: I1205 15:22:22.085141 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e" path="/var/lib/kubelet/pods/9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e/volumes" Dec 05 15:22:30 crc kubenswrapper[4840]: I1205 15:22:30.179084 4840 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","burstable","pod1c1a5e9e-4618-4e48-beb7-16857a715739"] err="unable to destroy cgroup paths for cgroup [kubepods burstable pod1c1a5e9e-4618-4e48-beb7-16857a715739] : Timed out while waiting for systemd to remove kubepods-burstable-pod1c1a5e9e_4618_4e48_beb7_16857a715739.slice" Dec 05 15:22:31 crc kubenswrapper[4840]: I1205 15:22:31.475374 4840 generic.go:334] "Generic (PLEG): container finished" podID="4b53cdac-e8cf-4dc5-abed-0d20e7ca8140" containerID="4fb3bc939769691873e133cf0da41478d2517abc982418359b6bedaabafe640b" exitCode=0 Dec 05 15:22:31 crc kubenswrapper[4840]: I1205 15:22:31.475463 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140","Type":"ContainerDied","Data":"4fb3bc939769691873e133cf0da41478d2517abc982418359b6bedaabafe640b"} 
Dec 05 15:22:32 crc kubenswrapper[4840]: I1205 15:22:32.502304 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4b53cdac-e8cf-4dc5-abed-0d20e7ca8140","Type":"ContainerStarted","Data":"467a74ecbba4ae631aa175e6a717c7e73baabd7c6a99d7dcaddd67fb8b9a6731"} Dec 05 15:22:32 crc kubenswrapper[4840]: I1205 15:22:32.502673 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 05 15:22:32 crc kubenswrapper[4840]: I1205 15:22:32.535244 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.535183511 podStartE2EDuration="36.535183511s" podCreationTimestamp="2025-12-05 15:21:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:22:32.527833404 +0000 UTC m=+1430.868896018" watchObservedRunningTime="2025-12-05 15:22:32.535183511 +0000 UTC m=+1430.876246125" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.324496 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7"] Dec 05 15:22:33 crc kubenswrapper[4840]: E1205 15:22:33.325167 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c96306d-162b-44be-8dfb-fea1280e5644" containerName="init" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.325181 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c96306d-162b-44be-8dfb-fea1280e5644" containerName="init" Dec 05 15:22:33 crc kubenswrapper[4840]: E1205 15:22:33.325272 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e" containerName="init" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.325278 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e" containerName="init" Dec 05 15:22:33 crc kubenswrapper[4840]: E1205 15:22:33.325298 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e" containerName="dnsmasq-dns" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.325306 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e" containerName="dnsmasq-dns" Dec 05 15:22:33 crc kubenswrapper[4840]: E1205 15:22:33.325316 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c96306d-162b-44be-8dfb-fea1280e5644" containerName="dnsmasq-dns" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.325321 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c96306d-162b-44be-8dfb-fea1280e5644" containerName="dnsmasq-dns" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.325516 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f3f66e4-fc7c-464c-9fa3-ec2fb6d2054e" containerName="dnsmasq-dns" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.325538 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c96306d-162b-44be-8dfb-fea1280e5644" containerName="dnsmasq-dns" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.326184 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.333638 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.335954 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-6c9x2" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.335995 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.335961 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.337508 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7"] Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.438750 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50a15428-e663-42af-a044-01daa7f04c93-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7\" (UID: \"50a15428-e663-42af-a044-01daa7f04c93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.438878 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/50a15428-e663-42af-a044-01daa7f04c93-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7\" (UID: \"50a15428-e663-42af-a044-01daa7f04c93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.438994 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d94bw\" (UniqueName: \"kubernetes.io/projected/50a15428-e663-42af-a044-01daa7f04c93-kube-api-access-d94bw\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7\" (UID: \"50a15428-e663-42af-a044-01daa7f04c93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.439032 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/50a15428-e663-42af-a044-01daa7f04c93-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7\" (UID: \"50a15428-e663-42af-a044-01daa7f04c93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.512330 4840 generic.go:334] "Generic (PLEG): container finished" podID="c0b4037e-9bd6-4a53-84b3-941d72023ce3" containerID="529bc60e2df42794f81ed1423725fa3f255ac0e8a7e371363ef440e190806b7f" exitCode=0 Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.512467 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c0b4037e-9bd6-4a53-84b3-941d72023ce3","Type":"ContainerDied","Data":"529bc60e2df42794f81ed1423725fa3f255ac0e8a7e371363ef440e190806b7f"} Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.541670 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50a15428-e663-42af-a044-01daa7f04c93-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7\" (UID: \"50a15428-e663-42af-a044-01daa7f04c93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.541763 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/50a15428-e663-42af-a044-01daa7f04c93-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7\" (UID: \"50a15428-e663-42af-a044-01daa7f04c93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.541838 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d94bw\" (UniqueName: \"kubernetes.io/projected/50a15428-e663-42af-a044-01daa7f04c93-kube-api-access-d94bw\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7\" (UID: \"50a15428-e663-42af-a044-01daa7f04c93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.541902 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/50a15428-e663-42af-a044-01daa7f04c93-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7\" (UID: \"50a15428-e663-42af-a044-01daa7f04c93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.550064 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/50a15428-e663-42af-a044-01daa7f04c93-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7\" (UID: \"50a15428-e663-42af-a044-01daa7f04c93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.551168 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/50a15428-e663-42af-a044-01daa7f04c93-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7\" (UID: \"50a15428-e663-42af-a044-01daa7f04c93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.552734 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50a15428-e663-42af-a044-01daa7f04c93-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7\" (UID: \"50a15428-e663-42af-a044-01daa7f04c93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.578854 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d94bw\" (UniqueName: \"kubernetes.io/projected/50a15428-e663-42af-a044-01daa7f04c93-kube-api-access-d94bw\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7\" (UID: \"50a15428-e663-42af-a044-01daa7f04c93\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7" Dec 05 15:22:33 crc kubenswrapper[4840]: I1205 15:22:33.646453 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7" Dec 05 15:22:34 crc kubenswrapper[4840]: I1205 15:22:34.197593 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7"] Dec 05 15:22:34 crc kubenswrapper[4840]: W1205 15:22:34.205234 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50a15428_e663_42af_a044_01daa7f04c93.slice/crio-d9aa7fa3072e9c3dd9fca73890fcfd5dc3ac3ed7cb68d0c9bb380afecde3bcfb WatchSource:0}: Error finding container d9aa7fa3072e9c3dd9fca73890fcfd5dc3ac3ed7cb68d0c9bb380afecde3bcfb: Status 404 returned error can't find the container with id d9aa7fa3072e9c3dd9fca73890fcfd5dc3ac3ed7cb68d0c9bb380afecde3bcfb Dec 05 15:22:34 crc kubenswrapper[4840]: I1205 15:22:34.522995 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c0b4037e-9bd6-4a53-84b3-941d72023ce3","Type":"ContainerStarted","Data":"dfe27beddb1ea994ee5ef607a4dea9439873bc3b47d98da1204281f87a0d761d"} Dec 05 15:22:34 crc kubenswrapper[4840]: I1205 15:22:34.524338 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:22:34 crc kubenswrapper[4840]: I1205 15:22:34.525095 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7" event={"ID":"50a15428-e663-42af-a044-01daa7f04c93","Type":"ContainerStarted","Data":"d9aa7fa3072e9c3dd9fca73890fcfd5dc3ac3ed7cb68d0c9bb380afecde3bcfb"} Dec 05 15:22:34 crc kubenswrapper[4840]: I1205 15:22:34.559623 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.559598685 podStartE2EDuration="36.559598685s" podCreationTimestamp="2025-12-05 15:21:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 15:22:34.551771595 +0000 UTC m=+1432.892834199" watchObservedRunningTime="2025-12-05 15:22:34.559598685 +0000 UTC m=+1432.900661309" Dec 05 15:22:42 crc kubenswrapper[4840]: I1205 15:22:42.976307 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 15:22:43 crc kubenswrapper[4840]: I1205 15:22:43.654237 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7" event={"ID":"50a15428-e663-42af-a044-01daa7f04c93","Type":"ContainerStarted","Data":"4206c6e8b559ded62f9d6097d08be6a9260f4394d6970b87b12d4d8e41c7eb82"} Dec 05 15:22:43 crc kubenswrapper[4840]: I1205 15:22:43.682170 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7" podStartSLOduration=1.9173616610000002 podStartE2EDuration="10.682109884s" podCreationTimestamp="2025-12-05 15:22:33 +0000 UTC" firstStartedPulling="2025-12-05 15:22:34.208363722 +0000 UTC m=+1432.549426336" lastFinishedPulling="2025-12-05 15:22:42.973111905 +0000 UTC m=+1441.314174559" observedRunningTime="2025-12-05 15:22:43.677185736 +0000 UTC m=+1442.018248370" watchObservedRunningTime="2025-12-05 15:22:43.682109884 +0000 UTC m=+1442.023172518" Dec 05 15:22:46 crc kubenswrapper[4840]: I1205 15:22:46.537055 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" 
Dec 05 15:22:48 crc kubenswrapper[4840]: I1205 15:22:48.552103 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 05 15:22:54 crc kubenswrapper[4840]: I1205 15:22:54.849715 4840 generic.go:334] "Generic (PLEG): container finished" podID="50a15428-e663-42af-a044-01daa7f04c93" containerID="4206c6e8b559ded62f9d6097d08be6a9260f4394d6970b87b12d4d8e41c7eb82" exitCode=0 Dec 05 15:22:54 crc kubenswrapper[4840]: I1205 15:22:54.849823 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7" event={"ID":"50a15428-e663-42af-a044-01daa7f04c93","Type":"ContainerDied","Data":"4206c6e8b559ded62f9d6097d08be6a9260f4394d6970b87b12d4d8e41c7eb82"} Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.415588 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7" Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.486657 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/50a15428-e663-42af-a044-01daa7f04c93-ssh-key\") pod \"50a15428-e663-42af-a044-01daa7f04c93\" (UID: \"50a15428-e663-42af-a044-01daa7f04c93\") " Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.486978 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/50a15428-e663-42af-a044-01daa7f04c93-inventory\") pod \"50a15428-e663-42af-a044-01daa7f04c93\" (UID: \"50a15428-e663-42af-a044-01daa7f04c93\") " Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.487074 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d94bw\" (UniqueName: \"kubernetes.io/projected/50a15428-e663-42af-a044-01daa7f04c93-kube-api-access-d94bw\") pod \"50a15428-e663-42af-a044-01daa7f04c93\" (UID: \"50a15428-e663-42af-a044-01daa7f04c93\") " Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.487101 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50a15428-e663-42af-a044-01daa7f04c93-repo-setup-combined-ca-bundle\") pod \"50a15428-e663-42af-a044-01daa7f04c93\" (UID: \"50a15428-e663-42af-a044-01daa7f04c93\") " Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.492965 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50a15428-e663-42af-a044-01daa7f04c93-kube-api-access-d94bw" (OuterVolumeSpecName: "kube-api-access-d94bw") pod "50a15428-e663-42af-a044-01daa7f04c93" (UID: "50a15428-e663-42af-a044-01daa7f04c93"). InnerVolumeSpecName "kube-api-access-d94bw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.495989 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50a15428-e663-42af-a044-01daa7f04c93-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "50a15428-e663-42af-a044-01daa7f04c93" (UID: "50a15428-e663-42af-a044-01daa7f04c93"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.516632 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50a15428-e663-42af-a044-01daa7f04c93-inventory" (OuterVolumeSpecName: "inventory") pod "50a15428-e663-42af-a044-01daa7f04c93" (UID: "50a15428-e663-42af-a044-01daa7f04c93"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.516807 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50a15428-e663-42af-a044-01daa7f04c93-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "50a15428-e663-42af-a044-01daa7f04c93" (UID: "50a15428-e663-42af-a044-01daa7f04c93"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.588907 4840 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50a15428-e663-42af-a044-01daa7f04c93-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.588939 4840 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/50a15428-e663-42af-a044-01daa7f04c93-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.588950 4840 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/50a15428-e663-42af-a044-01daa7f04c93-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.588975 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d94bw\" (UniqueName: \"kubernetes.io/projected/50a15428-e663-42af-a044-01daa7f04c93-kube-api-access-d94bw\") on node \"crc\" DevicePath \"\"" Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.870315 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7" event={"ID":"50a15428-e663-42af-a044-01daa7f04c93","Type":"ContainerDied","Data":"d9aa7fa3072e9c3dd9fca73890fcfd5dc3ac3ed7cb68d0c9bb380afecde3bcfb"} Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.870600 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d9aa7fa3072e9c3dd9fca73890fcfd5dc3ac3ed7cb68d0c9bb380afecde3bcfb" Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.870405 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7" Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.939561 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-4m26l"] Dec 05 15:22:56 crc kubenswrapper[4840]: E1205 15:22:56.940258 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50a15428-e663-42af-a044-01daa7f04c93" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.940365 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="50a15428-e663-42af-a044-01daa7f04c93" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.940713 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="50a15428-e663-42af-a044-01daa7f04c93" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.941651 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4m26l" Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.943722 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-6c9x2" Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.944019 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.944239 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.945640 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.949575 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-4m26l"] Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.995416 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/740d3f73-d31d-4a95-9830-ed5545f8525a-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-4m26l\" (UID: \"740d3f73-d31d-4a95-9830-ed5545f8525a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4m26l" Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.995492 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55p7k\" (UniqueName: \"kubernetes.io/projected/740d3f73-d31d-4a95-9830-ed5545f8525a-kube-api-access-55p7k\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-4m26l\" (UID: \"740d3f73-d31d-4a95-9830-ed5545f8525a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4m26l" Dec 05 15:22:56 crc kubenswrapper[4840]: I1205 15:22:56.995551 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/740d3f73-d31d-4a95-9830-ed5545f8525a-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-4m26l\" (UID: \"740d3f73-d31d-4a95-9830-ed5545f8525a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4m26l" Dec 05 15:22:57 crc kubenswrapper[4840]: I1205 15:22:57.098241 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ssh-key\" (UniqueName: \"kubernetes.io/secret/740d3f73-d31d-4a95-9830-ed5545f8525a-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-4m26l\" (UID: \"740d3f73-d31d-4a95-9830-ed5545f8525a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4m26l" Dec 05 15:22:57 crc kubenswrapper[4840]: I1205 15:22:57.098378 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55p7k\" (UniqueName: \"kubernetes.io/projected/740d3f73-d31d-4a95-9830-ed5545f8525a-kube-api-access-55p7k\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-4m26l\" (UID: \"740d3f73-d31d-4a95-9830-ed5545f8525a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4m26l" Dec 05 15:22:57 crc kubenswrapper[4840]: I1205 15:22:57.098537 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/740d3f73-d31d-4a95-9830-ed5545f8525a-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-4m26l\" (UID: \"740d3f73-d31d-4a95-9830-ed5545f8525a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4m26l" Dec 05 15:22:57 crc kubenswrapper[4840]: I1205 15:22:57.103070 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/740d3f73-d31d-4a95-9830-ed5545f8525a-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-4m26l\" (UID: \"740d3f73-d31d-4a95-9830-ed5545f8525a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4m26l" Dec 05 15:22:57 crc kubenswrapper[4840]: I1205 15:22:57.103424 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/740d3f73-d31d-4a95-9830-ed5545f8525a-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-4m26l\" (UID: \"740d3f73-d31d-4a95-9830-ed5545f8525a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4m26l" Dec 05 15:22:57 crc kubenswrapper[4840]: I1205 15:22:57.126193 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55p7k\" (UniqueName: \"kubernetes.io/projected/740d3f73-d31d-4a95-9830-ed5545f8525a-kube-api-access-55p7k\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-4m26l\" (UID: \"740d3f73-d31d-4a95-9830-ed5545f8525a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4m26l" Dec 05 15:22:57 crc kubenswrapper[4840]: I1205 15:22:57.260281 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4m26l" Dec 05 15:22:57 crc kubenswrapper[4840]: I1205 15:22:57.898935 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-4m26l"] Dec 05 15:22:58 crc kubenswrapper[4840]: I1205 15:22:58.895123 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4m26l" event={"ID":"740d3f73-d31d-4a95-9830-ed5545f8525a","Type":"ContainerStarted","Data":"f6e82c7f11319da83fc0e7fd1c74506e6047cd699d50d1ea78b406545be58779"} Dec 05 15:22:58 crc kubenswrapper[4840]: I1205 15:22:58.895206 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4m26l" event={"ID":"740d3f73-d31d-4a95-9830-ed5545f8525a","Type":"ContainerStarted","Data":"9a1a3f3daf6881a4eeffa83c61f9068feb9239c992e262367352f644e1e6859c"} Dec 05 15:22:58 crc kubenswrapper[4840]: I1205 15:22:58.915197 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4m26l" podStartSLOduration=2.502313946 podStartE2EDuration="2.915171163s" podCreationTimestamp="2025-12-05 15:22:56 +0000 UTC" firstStartedPulling="2025-12-05 15:22:57.899114823 +0000 UTC m=+1456.240177437" lastFinishedPulling="2025-12-05 15:22:58.31197204 +0000 UTC m=+1456.653034654" observedRunningTime="2025-12-05 15:22:58.912827638 +0000 UTC m=+1457.253890272" watchObservedRunningTime="2025-12-05 15:22:58.915171163 +0000 UTC m=+1457.256233797" Dec 05 15:23:01 crc kubenswrapper[4840]: I1205 15:23:01.929963 4840 generic.go:334] "Generic (PLEG): container finished" podID="740d3f73-d31d-4a95-9830-ed5545f8525a" containerID="f6e82c7f11319da83fc0e7fd1c74506e6047cd699d50d1ea78b406545be58779" exitCode=0 Dec 05 15:23:01 crc kubenswrapper[4840]: I1205 15:23:01.930076 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4m26l" event={"ID":"740d3f73-d31d-4a95-9830-ed5545f8525a","Type":"ContainerDied","Data":"f6e82c7f11319da83fc0e7fd1c74506e6047cd699d50d1ea78b406545be58779"} Dec 05 15:23:03 crc kubenswrapper[4840]: I1205 15:23:03.387962 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4m26l" Dec 05 15:23:03 crc kubenswrapper[4840]: I1205 15:23:03.471236 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/740d3f73-d31d-4a95-9830-ed5545f8525a-inventory\") pod \"740d3f73-d31d-4a95-9830-ed5545f8525a\" (UID: \"740d3f73-d31d-4a95-9830-ed5545f8525a\") " Dec 05 15:23:03 crc kubenswrapper[4840]: I1205 15:23:03.471540 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/740d3f73-d31d-4a95-9830-ed5545f8525a-ssh-key\") pod \"740d3f73-d31d-4a95-9830-ed5545f8525a\" (UID: \"740d3f73-d31d-4a95-9830-ed5545f8525a\") " Dec 05 15:23:03 crc kubenswrapper[4840]: I1205 15:23:03.471801 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55p7k\" (UniqueName: \"kubernetes.io/projected/740d3f73-d31d-4a95-9830-ed5545f8525a-kube-api-access-55p7k\") pod \"740d3f73-d31d-4a95-9830-ed5545f8525a\" (UID: \"740d3f73-d31d-4a95-9830-ed5545f8525a\") " Dec 05 15:23:03 crc kubenswrapper[4840]: I1205 15:23:03.477358 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/740d3f73-d31d-4a95-9830-ed5545f8525a-kube-api-access-55p7k" (OuterVolumeSpecName: "kube-api-access-55p7k") pod "740d3f73-d31d-4a95-9830-ed5545f8525a" (UID: "740d3f73-d31d-4a95-9830-ed5545f8525a"). InnerVolumeSpecName "kube-api-access-55p7k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:23:03 crc kubenswrapper[4840]: I1205 15:23:03.498552 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/740d3f73-d31d-4a95-9830-ed5545f8525a-inventory" (OuterVolumeSpecName: "inventory") pod "740d3f73-d31d-4a95-9830-ed5545f8525a" (UID: "740d3f73-d31d-4a95-9830-ed5545f8525a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:23:03 crc kubenswrapper[4840]: I1205 15:23:03.508846 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/740d3f73-d31d-4a95-9830-ed5545f8525a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "740d3f73-d31d-4a95-9830-ed5545f8525a" (UID: "740d3f73-d31d-4a95-9830-ed5545f8525a"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:23:03 crc kubenswrapper[4840]: I1205 15:23:03.573574 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55p7k\" (UniqueName: \"kubernetes.io/projected/740d3f73-d31d-4a95-9830-ed5545f8525a-kube-api-access-55p7k\") on node \"crc\" DevicePath \"\"" Dec 05 15:23:03 crc kubenswrapper[4840]: I1205 15:23:03.573607 4840 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/740d3f73-d31d-4a95-9830-ed5545f8525a-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 15:23:03 crc kubenswrapper[4840]: I1205 15:23:03.573616 4840 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/740d3f73-d31d-4a95-9830-ed5545f8525a-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 15:23:03 crc kubenswrapper[4840]: I1205 15:23:03.952474 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4m26l" event={"ID":"740d3f73-d31d-4a95-9830-ed5545f8525a","Type":"ContainerDied","Data":"9a1a3f3daf6881a4eeffa83c61f9068feb9239c992e262367352f644e1e6859c"} Dec 05 15:23:03 crc kubenswrapper[4840]: I1205 15:23:03.952531 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9a1a3f3daf6881a4eeffa83c61f9068feb9239c992e262367352f644e1e6859c" Dec 05 15:23:03 crc kubenswrapper[4840]: I1205 15:23:03.952553 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4m26l" Dec 05 15:23:04 crc kubenswrapper[4840]: I1205 15:23:04.097776 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt"] Dec 05 15:23:04 crc kubenswrapper[4840]: E1205 15:23:04.098277 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="740d3f73-d31d-4a95-9830-ed5545f8525a" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 15:23:04 crc kubenswrapper[4840]: I1205 15:23:04.098305 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="740d3f73-d31d-4a95-9830-ed5545f8525a" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 15:23:04 crc kubenswrapper[4840]: I1205 15:23:04.098536 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="740d3f73-d31d-4a95-9830-ed5545f8525a" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 15:23:04 crc kubenswrapper[4840]: I1205 15:23:04.099213 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt" Dec 05 15:23:04 crc kubenswrapper[4840]: I1205 15:23:04.101816 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 15:23:04 crc kubenswrapper[4840]: I1205 15:23:04.102286 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 15:23:04 crc kubenswrapper[4840]: I1205 15:23:04.102477 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 15:23:04 crc kubenswrapper[4840]: I1205 15:23:04.110289 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-6c9x2" Dec 05 15:23:04 crc kubenswrapper[4840]: I1205 15:23:04.110611 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt"] Dec 05 15:23:04 crc kubenswrapper[4840]: I1205 15:23:04.184970 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3298a054-72de-4060-95c4-ff42a8ed3a7f-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt\" (UID: \"3298a054-72de-4060-95c4-ff42a8ed3a7f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt" Dec 05 15:23:04 crc kubenswrapper[4840]: I1205 15:23:04.185060 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4b9m\" (UniqueName: \"kubernetes.io/projected/3298a054-72de-4060-95c4-ff42a8ed3a7f-kube-api-access-g4b9m\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt\" (UID: \"3298a054-72de-4060-95c4-ff42a8ed3a7f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt" Dec 05 15:23:04 crc kubenswrapper[4840]: I1205 15:23:04.185094 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3298a054-72de-4060-95c4-ff42a8ed3a7f-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt\" (UID: \"3298a054-72de-4060-95c4-ff42a8ed3a7f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt" Dec 05 15:23:04 crc kubenswrapper[4840]: I1205 15:23:04.185118 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3298a054-72de-4060-95c4-ff42a8ed3a7f-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt\" (UID: \"3298a054-72de-4060-95c4-ff42a8ed3a7f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt" Dec 05 15:23:04 crc kubenswrapper[4840]: I1205 15:23:04.286367 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3298a054-72de-4060-95c4-ff42a8ed3a7f-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt\" (UID: \"3298a054-72de-4060-95c4-ff42a8ed3a7f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt" Dec 05 15:23:04 crc kubenswrapper[4840]: I1205 15:23:04.286418 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4b9m\" (UniqueName: \"kubernetes.io/projected/3298a054-72de-4060-95c4-ff42a8ed3a7f-kube-api-access-g4b9m\") 
pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt\" (UID: \"3298a054-72de-4060-95c4-ff42a8ed3a7f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt" Dec 05 15:23:04 crc kubenswrapper[4840]: I1205 15:23:04.286449 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3298a054-72de-4060-95c4-ff42a8ed3a7f-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt\" (UID: \"3298a054-72de-4060-95c4-ff42a8ed3a7f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt" Dec 05 15:23:04 crc kubenswrapper[4840]: I1205 15:23:04.286473 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3298a054-72de-4060-95c4-ff42a8ed3a7f-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt\" (UID: \"3298a054-72de-4060-95c4-ff42a8ed3a7f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt" Dec 05 15:23:04 crc kubenswrapper[4840]: I1205 15:23:04.290494 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3298a054-72de-4060-95c4-ff42a8ed3a7f-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt\" (UID: \"3298a054-72de-4060-95c4-ff42a8ed3a7f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt" Dec 05 15:23:04 crc kubenswrapper[4840]: I1205 15:23:04.290698 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3298a054-72de-4060-95c4-ff42a8ed3a7f-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt\" (UID: \"3298a054-72de-4060-95c4-ff42a8ed3a7f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt" Dec 05 15:23:04 crc kubenswrapper[4840]: I1205 15:23:04.293279 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3298a054-72de-4060-95c4-ff42a8ed3a7f-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt\" (UID: \"3298a054-72de-4060-95c4-ff42a8ed3a7f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt" Dec 05 15:23:04 crc kubenswrapper[4840]: I1205 15:23:04.302993 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4b9m\" (UniqueName: \"kubernetes.io/projected/3298a054-72de-4060-95c4-ff42a8ed3a7f-kube-api-access-g4b9m\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt\" (UID: \"3298a054-72de-4060-95c4-ff42a8ed3a7f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt" Dec 05 15:23:04 crc kubenswrapper[4840]: I1205 15:23:04.421007 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt" Dec 05 15:23:04 crc kubenswrapper[4840]: I1205 15:23:04.936591 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt"] Dec 05 15:23:04 crc kubenswrapper[4840]: I1205 15:23:04.963526 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt" event={"ID":"3298a054-72de-4060-95c4-ff42a8ed3a7f","Type":"ContainerStarted","Data":"6c715f4991d5a8129c2303094b13779b2b4ffaa8a7d28e40f74b7a48ee1b2523"} Dec 05 15:23:06 crc kubenswrapper[4840]: I1205 15:23:06.010030 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt" event={"ID":"3298a054-72de-4060-95c4-ff42a8ed3a7f","Type":"ContainerStarted","Data":"50c4eaeba5581b9fba173f1170e40483ea861e7c5359b7a70ef6af8e5d415a7c"} Dec 05 15:23:06 crc kubenswrapper[4840]: I1205 15:23:06.027187 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt" podStartSLOduration=1.645371451 podStartE2EDuration="2.027154984s" podCreationTimestamp="2025-12-05 15:23:04 +0000 UTC" firstStartedPulling="2025-12-05 15:23:04.938335073 +0000 UTC m=+1463.279397677" lastFinishedPulling="2025-12-05 15:23:05.320118556 +0000 UTC m=+1463.661181210" observedRunningTime="2025-12-05 15:23:06.025393984 +0000 UTC m=+1464.366456608" watchObservedRunningTime="2025-12-05 15:23:06.027154984 +0000 UTC m=+1464.368217638" Dec 05 15:23:16 crc kubenswrapper[4840]: I1205 15:23:16.091935 4840 scope.go:117] "RemoveContainer" containerID="be2de59156e98ca002e13a6716bcb93f683e2804f791db487ccae2bdf55580ba" Dec 05 15:23:16 crc kubenswrapper[4840]: I1205 15:23:16.139074 4840 scope.go:117] "RemoveContainer" containerID="1e74c74674a33aa47a2253145689571d0cc8a4e98b2644024a619ecd93e158c2" Dec 05 15:23:16 crc kubenswrapper[4840]: I1205 15:23:16.188822 4840 scope.go:117] "RemoveContainer" containerID="4d041a905acc7c052bc51380ecff9a15d22058aa5c28356f2aed639188cd6158" Dec 05 15:23:16 crc kubenswrapper[4840]: I1205 15:23:16.225635 4840 scope.go:117] "RemoveContainer" containerID="0a3e6f3200f151a74a5f6dedf9b48a7b0f81d62ca7d8f4a6165dbfa27d38c4a7" Dec 05 15:23:16 crc kubenswrapper[4840]: I1205 15:23:16.274453 4840 scope.go:117] "RemoveContainer" containerID="298650201e59ffec2ec45ee76964aaca6c82f53b6d7bfa62b6af15c17019e837" Dec 05 15:23:49 crc kubenswrapper[4840]: I1205 15:23:49.472225 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:23:49 crc kubenswrapper[4840]: I1205 15:23:49.472792 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:24:16 crc kubenswrapper[4840]: I1205 15:24:16.394137 4840 scope.go:117] "RemoveContainer" containerID="959c31b1df96cbf3954e3394e2e3259bbf0a197a97f5e49d032d347c25242994" Dec 05 15:24:16 crc kubenswrapper[4840]: I1205 15:24:16.431236 4840 scope.go:117] "RemoveContainer" 
containerID="ac75e4d88c6f325fe89f2f2322280dca678553a071c8e13b4e8a309f30d8d211" Dec 05 15:24:19 crc kubenswrapper[4840]: I1205 15:24:19.472093 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:24:19 crc kubenswrapper[4840]: I1205 15:24:19.472723 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:24:49 crc kubenswrapper[4840]: I1205 15:24:49.471518 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:24:49 crc kubenswrapper[4840]: I1205 15:24:49.472148 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:24:49 crc kubenswrapper[4840]: I1205 15:24:49.472206 4840 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" Dec 05 15:24:49 crc kubenswrapper[4840]: I1205 15:24:49.473012 4840 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b"} pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 15:24:49 crc kubenswrapper[4840]: I1205 15:24:49.473293 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" containerID="cri-o://9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" gracePeriod=600 Dec 05 15:24:49 crc kubenswrapper[4840]: E1205 15:24:49.602067 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:24:49 crc kubenswrapper[4840]: I1205 15:24:49.859952 4840 generic.go:334] "Generic (PLEG): container finished" podID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" exitCode=0 Dec 05 15:24:49 crc kubenswrapper[4840]: I1205 15:24:49.860027 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" 
event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerDied","Data":"9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b"} Dec 05 15:24:49 crc kubenswrapper[4840]: I1205 15:24:49.860297 4840 scope.go:117] "RemoveContainer" containerID="40055bc2420888638e5359189b9a9948a0cbd2aa70e5300e22d9c614bb6d6f19" Dec 05 15:24:49 crc kubenswrapper[4840]: I1205 15:24:49.860697 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:24:49 crc kubenswrapper[4840]: E1205 15:24:49.860972 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:25:04 crc kubenswrapper[4840]: I1205 15:25:04.067495 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:25:04 crc kubenswrapper[4840]: E1205 15:25:04.068639 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:25:11 crc kubenswrapper[4840]: I1205 15:25:11.129639 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cbwx7"] Dec 05 15:25:11 crc kubenswrapper[4840]: I1205 15:25:11.135514 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cbwx7" Dec 05 15:25:11 crc kubenswrapper[4840]: I1205 15:25:11.141537 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cbwx7"] Dec 05 15:25:11 crc kubenswrapper[4840]: I1205 15:25:11.214880 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33504b7a-7bf3-4189-bdb2-9c060ef86fc1-utilities\") pod \"community-operators-cbwx7\" (UID: \"33504b7a-7bf3-4189-bdb2-9c060ef86fc1\") " pod="openshift-marketplace/community-operators-cbwx7" Dec 05 15:25:11 crc kubenswrapper[4840]: I1205 15:25:11.214950 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gn9mg\" (UniqueName: \"kubernetes.io/projected/33504b7a-7bf3-4189-bdb2-9c060ef86fc1-kube-api-access-gn9mg\") pod \"community-operators-cbwx7\" (UID: \"33504b7a-7bf3-4189-bdb2-9c060ef86fc1\") " pod="openshift-marketplace/community-operators-cbwx7" Dec 05 15:25:11 crc kubenswrapper[4840]: I1205 15:25:11.214992 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33504b7a-7bf3-4189-bdb2-9c060ef86fc1-catalog-content\") pod \"community-operators-cbwx7\" (UID: \"33504b7a-7bf3-4189-bdb2-9c060ef86fc1\") " pod="openshift-marketplace/community-operators-cbwx7" Dec 05 15:25:11 crc kubenswrapper[4840]: I1205 15:25:11.317176 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33504b7a-7bf3-4189-bdb2-9c060ef86fc1-utilities\") pod \"community-operators-cbwx7\" (UID: \"33504b7a-7bf3-4189-bdb2-9c060ef86fc1\") " pod="openshift-marketplace/community-operators-cbwx7" Dec 05 15:25:11 crc kubenswrapper[4840]: I1205 15:25:11.317237 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gn9mg\" (UniqueName: \"kubernetes.io/projected/33504b7a-7bf3-4189-bdb2-9c060ef86fc1-kube-api-access-gn9mg\") pod \"community-operators-cbwx7\" (UID: \"33504b7a-7bf3-4189-bdb2-9c060ef86fc1\") " pod="openshift-marketplace/community-operators-cbwx7" Dec 05 15:25:11 crc kubenswrapper[4840]: I1205 15:25:11.317263 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33504b7a-7bf3-4189-bdb2-9c060ef86fc1-catalog-content\") pod \"community-operators-cbwx7\" (UID: \"33504b7a-7bf3-4189-bdb2-9c060ef86fc1\") " pod="openshift-marketplace/community-operators-cbwx7" Dec 05 15:25:11 crc kubenswrapper[4840]: I1205 15:25:11.317948 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33504b7a-7bf3-4189-bdb2-9c060ef86fc1-utilities\") pod \"community-operators-cbwx7\" (UID: \"33504b7a-7bf3-4189-bdb2-9c060ef86fc1\") " pod="openshift-marketplace/community-operators-cbwx7" Dec 05 15:25:11 crc kubenswrapper[4840]: I1205 15:25:11.317996 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33504b7a-7bf3-4189-bdb2-9c060ef86fc1-catalog-content\") pod \"community-operators-cbwx7\" (UID: \"33504b7a-7bf3-4189-bdb2-9c060ef86fc1\") " pod="openshift-marketplace/community-operators-cbwx7" Dec 05 15:25:11 crc kubenswrapper[4840]: I1205 15:25:11.467891 4840 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-gn9mg\" (UniqueName: \"kubernetes.io/projected/33504b7a-7bf3-4189-bdb2-9c060ef86fc1-kube-api-access-gn9mg\") pod \"community-operators-cbwx7\" (UID: \"33504b7a-7bf3-4189-bdb2-9c060ef86fc1\") " pod="openshift-marketplace/community-operators-cbwx7" Dec 05 15:25:11 crc kubenswrapper[4840]: I1205 15:25:11.763648 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cbwx7" Dec 05 15:25:12 crc kubenswrapper[4840]: I1205 15:25:12.251434 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cbwx7"] Dec 05 15:25:12 crc kubenswrapper[4840]: I1205 15:25:12.615027 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cbwx7" event={"ID":"33504b7a-7bf3-4189-bdb2-9c060ef86fc1","Type":"ContainerStarted","Data":"e97f481833439f6ce5370f0d672970e905e0f6e6dc4dcf3d7aa324792f79932d"} Dec 05 15:25:13 crc kubenswrapper[4840]: I1205 15:25:13.626452 4840 generic.go:334] "Generic (PLEG): container finished" podID="33504b7a-7bf3-4189-bdb2-9c060ef86fc1" containerID="07261edb4677836aa7cd0e96ed418b31e36fc9b8f11070ec3acbfa0ff9019d96" exitCode=0 Dec 05 15:25:13 crc kubenswrapper[4840]: I1205 15:25:13.626565 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cbwx7" event={"ID":"33504b7a-7bf3-4189-bdb2-9c060ef86fc1","Type":"ContainerDied","Data":"07261edb4677836aa7cd0e96ed418b31e36fc9b8f11070ec3acbfa0ff9019d96"} Dec 05 15:25:13 crc kubenswrapper[4840]: I1205 15:25:13.629321 4840 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 15:25:14 crc kubenswrapper[4840]: I1205 15:25:14.690046 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cbwx7" event={"ID":"33504b7a-7bf3-4189-bdb2-9c060ef86fc1","Type":"ContainerStarted","Data":"fb5715bc00d6223ebb9e254cc6290065a022639c0cabf25780be197323c24c92"} Dec 05 15:25:15 crc kubenswrapper[4840]: I1205 15:25:15.700683 4840 generic.go:334] "Generic (PLEG): container finished" podID="33504b7a-7bf3-4189-bdb2-9c060ef86fc1" containerID="fb5715bc00d6223ebb9e254cc6290065a022639c0cabf25780be197323c24c92" exitCode=0 Dec 05 15:25:15 crc kubenswrapper[4840]: I1205 15:25:15.701269 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cbwx7" event={"ID":"33504b7a-7bf3-4189-bdb2-9c060ef86fc1","Type":"ContainerDied","Data":"fb5715bc00d6223ebb9e254cc6290065a022639c0cabf25780be197323c24c92"} Dec 05 15:25:16 crc kubenswrapper[4840]: I1205 15:25:16.509434 4840 scope.go:117] "RemoveContainer" containerID="4962356fe991ffa318ba7d812198dd9cfb24ee68301defa6743104c44f2c4238" Dec 05 15:25:16 crc kubenswrapper[4840]: I1205 15:25:16.542735 4840 scope.go:117] "RemoveContainer" containerID="adfee7236fefc58166b18948e7fb8930431957f89c8f13358d96962c6f1a4fde" Dec 05 15:25:16 crc kubenswrapper[4840]: I1205 15:25:16.714658 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cbwx7" event={"ID":"33504b7a-7bf3-4189-bdb2-9c060ef86fc1","Type":"ContainerStarted","Data":"d21b0739c2795c5ec1dd309f8b9e18738a52633d8fa496b248b2e0fa5c1c646a"} Dec 05 15:25:16 crc kubenswrapper[4840]: I1205 15:25:16.737372 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cbwx7" podStartSLOduration=2.889283037 
podStartE2EDuration="5.737339065s" podCreationTimestamp="2025-12-05 15:25:11 +0000 UTC" firstStartedPulling="2025-12-05 15:25:13.628951213 +0000 UTC m=+1591.970013827" lastFinishedPulling="2025-12-05 15:25:16.477007241 +0000 UTC m=+1594.818069855" observedRunningTime="2025-12-05 15:25:16.729539205 +0000 UTC m=+1595.070601849" watchObservedRunningTime="2025-12-05 15:25:16.737339065 +0000 UTC m=+1595.078401669" Dec 05 15:25:19 crc kubenswrapper[4840]: I1205 15:25:19.071690 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:25:19 crc kubenswrapper[4840]: E1205 15:25:19.072134 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:25:21 crc kubenswrapper[4840]: I1205 15:25:21.764252 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cbwx7" Dec 05 15:25:21 crc kubenswrapper[4840]: I1205 15:25:21.764643 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cbwx7" Dec 05 15:25:21 crc kubenswrapper[4840]: I1205 15:25:21.857862 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cbwx7" Dec 05 15:25:22 crc kubenswrapper[4840]: I1205 15:25:22.811855 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-cbwx7" Dec 05 15:25:22 crc kubenswrapper[4840]: I1205 15:25:22.861939 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cbwx7"] Dec 05 15:25:24 crc kubenswrapper[4840]: I1205 15:25:24.781893 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cbwx7" podUID="33504b7a-7bf3-4189-bdb2-9c060ef86fc1" containerName="registry-server" containerID="cri-o://d21b0739c2795c5ec1dd309f8b9e18738a52633d8fa496b248b2e0fa5c1c646a" gracePeriod=2 Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.319307 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cbwx7" Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.478374 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33504b7a-7bf3-4189-bdb2-9c060ef86fc1-utilities\") pod \"33504b7a-7bf3-4189-bdb2-9c060ef86fc1\" (UID: \"33504b7a-7bf3-4189-bdb2-9c060ef86fc1\") " Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.478534 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33504b7a-7bf3-4189-bdb2-9c060ef86fc1-catalog-content\") pod \"33504b7a-7bf3-4189-bdb2-9c060ef86fc1\" (UID: \"33504b7a-7bf3-4189-bdb2-9c060ef86fc1\") " Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.478633 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gn9mg\" (UniqueName: \"kubernetes.io/projected/33504b7a-7bf3-4189-bdb2-9c060ef86fc1-kube-api-access-gn9mg\") pod \"33504b7a-7bf3-4189-bdb2-9c060ef86fc1\" (UID: \"33504b7a-7bf3-4189-bdb2-9c060ef86fc1\") " Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.479946 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33504b7a-7bf3-4189-bdb2-9c060ef86fc1-utilities" (OuterVolumeSpecName: "utilities") pod "33504b7a-7bf3-4189-bdb2-9c060ef86fc1" (UID: "33504b7a-7bf3-4189-bdb2-9c060ef86fc1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.485600 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33504b7a-7bf3-4189-bdb2-9c060ef86fc1-kube-api-access-gn9mg" (OuterVolumeSpecName: "kube-api-access-gn9mg") pod "33504b7a-7bf3-4189-bdb2-9c060ef86fc1" (UID: "33504b7a-7bf3-4189-bdb2-9c060ef86fc1"). InnerVolumeSpecName "kube-api-access-gn9mg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.535578 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33504b7a-7bf3-4189-bdb2-9c060ef86fc1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "33504b7a-7bf3-4189-bdb2-9c060ef86fc1" (UID: "33504b7a-7bf3-4189-bdb2-9c060ef86fc1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.581250 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33504b7a-7bf3-4189-bdb2-9c060ef86fc1-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.581298 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33504b7a-7bf3-4189-bdb2-9c060ef86fc1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.581308 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gn9mg\" (UniqueName: \"kubernetes.io/projected/33504b7a-7bf3-4189-bdb2-9c060ef86fc1-kube-api-access-gn9mg\") on node \"crc\" DevicePath \"\"" Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.795312 4840 generic.go:334] "Generic (PLEG): container finished" podID="33504b7a-7bf3-4189-bdb2-9c060ef86fc1" containerID="d21b0739c2795c5ec1dd309f8b9e18738a52633d8fa496b248b2e0fa5c1c646a" exitCode=0 Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.795358 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cbwx7" event={"ID":"33504b7a-7bf3-4189-bdb2-9c060ef86fc1","Type":"ContainerDied","Data":"d21b0739c2795c5ec1dd309f8b9e18738a52633d8fa496b248b2e0fa5c1c646a"} Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.795413 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cbwx7" event={"ID":"33504b7a-7bf3-4189-bdb2-9c060ef86fc1","Type":"ContainerDied","Data":"e97f481833439f6ce5370f0d672970e905e0f6e6dc4dcf3d7aa324792f79932d"} Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.795410 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cbwx7" Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.795527 4840 scope.go:117] "RemoveContainer" containerID="d21b0739c2795c5ec1dd309f8b9e18738a52633d8fa496b248b2e0fa5c1c646a" Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.828017 4840 scope.go:117] "RemoveContainer" containerID="fb5715bc00d6223ebb9e254cc6290065a022639c0cabf25780be197323c24c92" Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.839711 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cbwx7"] Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.850142 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cbwx7"] Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.867059 4840 scope.go:117] "RemoveContainer" containerID="07261edb4677836aa7cd0e96ed418b31e36fc9b8f11070ec3acbfa0ff9019d96" Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.904785 4840 scope.go:117] "RemoveContainer" containerID="d21b0739c2795c5ec1dd309f8b9e18738a52633d8fa496b248b2e0fa5c1c646a" Dec 05 15:25:25 crc kubenswrapper[4840]: E1205 15:25:25.905286 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d21b0739c2795c5ec1dd309f8b9e18738a52633d8fa496b248b2e0fa5c1c646a\": container with ID starting with d21b0739c2795c5ec1dd309f8b9e18738a52633d8fa496b248b2e0fa5c1c646a not found: ID does not exist" containerID="d21b0739c2795c5ec1dd309f8b9e18738a52633d8fa496b248b2e0fa5c1c646a" Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.905331 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d21b0739c2795c5ec1dd309f8b9e18738a52633d8fa496b248b2e0fa5c1c646a"} err="failed to get container status \"d21b0739c2795c5ec1dd309f8b9e18738a52633d8fa496b248b2e0fa5c1c646a\": rpc error: code = NotFound desc = could not find container \"d21b0739c2795c5ec1dd309f8b9e18738a52633d8fa496b248b2e0fa5c1c646a\": container with ID starting with d21b0739c2795c5ec1dd309f8b9e18738a52633d8fa496b248b2e0fa5c1c646a not found: ID does not exist" Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.905361 4840 scope.go:117] "RemoveContainer" containerID="fb5715bc00d6223ebb9e254cc6290065a022639c0cabf25780be197323c24c92" Dec 05 15:25:25 crc kubenswrapper[4840]: E1205 15:25:25.905726 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb5715bc00d6223ebb9e254cc6290065a022639c0cabf25780be197323c24c92\": container with ID starting with fb5715bc00d6223ebb9e254cc6290065a022639c0cabf25780be197323c24c92 not found: ID does not exist" containerID="fb5715bc00d6223ebb9e254cc6290065a022639c0cabf25780be197323c24c92" Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.905749 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb5715bc00d6223ebb9e254cc6290065a022639c0cabf25780be197323c24c92"} err="failed to get container status \"fb5715bc00d6223ebb9e254cc6290065a022639c0cabf25780be197323c24c92\": rpc error: code = NotFound desc = could not find container \"fb5715bc00d6223ebb9e254cc6290065a022639c0cabf25780be197323c24c92\": container with ID starting with fb5715bc00d6223ebb9e254cc6290065a022639c0cabf25780be197323c24c92 not found: ID does not exist" Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.905762 4840 scope.go:117] "RemoveContainer" 
containerID="07261edb4677836aa7cd0e96ed418b31e36fc9b8f11070ec3acbfa0ff9019d96" Dec 05 15:25:25 crc kubenswrapper[4840]: E1205 15:25:25.906193 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07261edb4677836aa7cd0e96ed418b31e36fc9b8f11070ec3acbfa0ff9019d96\": container with ID starting with 07261edb4677836aa7cd0e96ed418b31e36fc9b8f11070ec3acbfa0ff9019d96 not found: ID does not exist" containerID="07261edb4677836aa7cd0e96ed418b31e36fc9b8f11070ec3acbfa0ff9019d96" Dec 05 15:25:25 crc kubenswrapper[4840]: I1205 15:25:25.906221 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07261edb4677836aa7cd0e96ed418b31e36fc9b8f11070ec3acbfa0ff9019d96"} err="failed to get container status \"07261edb4677836aa7cd0e96ed418b31e36fc9b8f11070ec3acbfa0ff9019d96\": rpc error: code = NotFound desc = could not find container \"07261edb4677836aa7cd0e96ed418b31e36fc9b8f11070ec3acbfa0ff9019d96\": container with ID starting with 07261edb4677836aa7cd0e96ed418b31e36fc9b8f11070ec3acbfa0ff9019d96 not found: ID does not exist" Dec 05 15:25:26 crc kubenswrapper[4840]: I1205 15:25:26.078943 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33504b7a-7bf3-4189-bdb2-9c060ef86fc1" path="/var/lib/kubelet/pods/33504b7a-7bf3-4189-bdb2-9c060ef86fc1/volumes" Dec 05 15:25:27 crc kubenswrapper[4840]: I1205 15:25:27.578656 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7mwf7"] Dec 05 15:25:27 crc kubenswrapper[4840]: E1205 15:25:27.579820 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33504b7a-7bf3-4189-bdb2-9c060ef86fc1" containerName="registry-server" Dec 05 15:25:27 crc kubenswrapper[4840]: I1205 15:25:27.579853 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="33504b7a-7bf3-4189-bdb2-9c060ef86fc1" containerName="registry-server" Dec 05 15:25:27 crc kubenswrapper[4840]: E1205 15:25:27.579959 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33504b7a-7bf3-4189-bdb2-9c060ef86fc1" containerName="extract-utilities" Dec 05 15:25:27 crc kubenswrapper[4840]: I1205 15:25:27.579982 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="33504b7a-7bf3-4189-bdb2-9c060ef86fc1" containerName="extract-utilities" Dec 05 15:25:27 crc kubenswrapper[4840]: E1205 15:25:27.580027 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33504b7a-7bf3-4189-bdb2-9c060ef86fc1" containerName="extract-content" Dec 05 15:25:27 crc kubenswrapper[4840]: I1205 15:25:27.580046 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="33504b7a-7bf3-4189-bdb2-9c060ef86fc1" containerName="extract-content" Dec 05 15:25:27 crc kubenswrapper[4840]: I1205 15:25:27.580462 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="33504b7a-7bf3-4189-bdb2-9c060ef86fc1" containerName="registry-server" Dec 05 15:25:27 crc kubenswrapper[4840]: I1205 15:25:27.583790 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7mwf7" Dec 05 15:25:27 crc kubenswrapper[4840]: I1205 15:25:27.627398 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7mwf7"] Dec 05 15:25:27 crc kubenswrapper[4840]: I1205 15:25:27.749297 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swmlj\" (UniqueName: \"kubernetes.io/projected/d8153f52-1255-4593-8136-083c0a618d49-kube-api-access-swmlj\") pod \"certified-operators-7mwf7\" (UID: \"d8153f52-1255-4593-8136-083c0a618d49\") " pod="openshift-marketplace/certified-operators-7mwf7" Dec 05 15:25:27 crc kubenswrapper[4840]: I1205 15:25:27.749517 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8153f52-1255-4593-8136-083c0a618d49-catalog-content\") pod \"certified-operators-7mwf7\" (UID: \"d8153f52-1255-4593-8136-083c0a618d49\") " pod="openshift-marketplace/certified-operators-7mwf7" Dec 05 15:25:27 crc kubenswrapper[4840]: I1205 15:25:27.749752 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8153f52-1255-4593-8136-083c0a618d49-utilities\") pod \"certified-operators-7mwf7\" (UID: \"d8153f52-1255-4593-8136-083c0a618d49\") " pod="openshift-marketplace/certified-operators-7mwf7" Dec 05 15:25:27 crc kubenswrapper[4840]: I1205 15:25:27.850949 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8153f52-1255-4593-8136-083c0a618d49-utilities\") pod \"certified-operators-7mwf7\" (UID: \"d8153f52-1255-4593-8136-083c0a618d49\") " pod="openshift-marketplace/certified-operators-7mwf7" Dec 05 15:25:27 crc kubenswrapper[4840]: I1205 15:25:27.851015 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swmlj\" (UniqueName: \"kubernetes.io/projected/d8153f52-1255-4593-8136-083c0a618d49-kube-api-access-swmlj\") pod \"certified-operators-7mwf7\" (UID: \"d8153f52-1255-4593-8136-083c0a618d49\") " pod="openshift-marketplace/certified-operators-7mwf7" Dec 05 15:25:27 crc kubenswrapper[4840]: I1205 15:25:27.851143 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8153f52-1255-4593-8136-083c0a618d49-catalog-content\") pod \"certified-operators-7mwf7\" (UID: \"d8153f52-1255-4593-8136-083c0a618d49\") " pod="openshift-marketplace/certified-operators-7mwf7" Dec 05 15:25:27 crc kubenswrapper[4840]: I1205 15:25:27.851611 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8153f52-1255-4593-8136-083c0a618d49-utilities\") pod \"certified-operators-7mwf7\" (UID: \"d8153f52-1255-4593-8136-083c0a618d49\") " pod="openshift-marketplace/certified-operators-7mwf7" Dec 05 15:25:27 crc kubenswrapper[4840]: I1205 15:25:27.851813 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8153f52-1255-4593-8136-083c0a618d49-catalog-content\") pod \"certified-operators-7mwf7\" (UID: \"d8153f52-1255-4593-8136-083c0a618d49\") " pod="openshift-marketplace/certified-operators-7mwf7" Dec 05 15:25:27 crc kubenswrapper[4840]: I1205 15:25:27.870669 4840 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-swmlj\" (UniqueName: \"kubernetes.io/projected/d8153f52-1255-4593-8136-083c0a618d49-kube-api-access-swmlj\") pod \"certified-operators-7mwf7\" (UID: \"d8153f52-1255-4593-8136-083c0a618d49\") " pod="openshift-marketplace/certified-operators-7mwf7" Dec 05 15:25:27 crc kubenswrapper[4840]: I1205 15:25:27.950260 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7mwf7" Dec 05 15:25:28 crc kubenswrapper[4840]: I1205 15:25:28.493516 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7mwf7"] Dec 05 15:25:28 crc kubenswrapper[4840]: I1205 15:25:28.860142 4840 generic.go:334] "Generic (PLEG): container finished" podID="d8153f52-1255-4593-8136-083c0a618d49" containerID="a8cd8940e804483b42180f152f73c5859b8c1373384f7170c825c9fca1b6b04c" exitCode=0 Dec 05 15:25:28 crc kubenswrapper[4840]: I1205 15:25:28.860233 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mwf7" event={"ID":"d8153f52-1255-4593-8136-083c0a618d49","Type":"ContainerDied","Data":"a8cd8940e804483b42180f152f73c5859b8c1373384f7170c825c9fca1b6b04c"} Dec 05 15:25:28 crc kubenswrapper[4840]: I1205 15:25:28.861047 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mwf7" event={"ID":"d8153f52-1255-4593-8136-083c0a618d49","Type":"ContainerStarted","Data":"d88f87c70b44ab95b3767e87d66a537eff4ec414337e41ba2a2b6763a2f1f302"} Dec 05 15:25:32 crc kubenswrapper[4840]: I1205 15:25:32.072892 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:25:32 crc kubenswrapper[4840]: E1205 15:25:32.073261 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:25:35 crc kubenswrapper[4840]: I1205 15:25:35.941239 4840 generic.go:334] "Generic (PLEG): container finished" podID="d8153f52-1255-4593-8136-083c0a618d49" containerID="6ea2a92fa9d36ebfa832354d473108d8ed9a42ddcf257e62f320c5cadeee76f1" exitCode=0 Dec 05 15:25:35 crc kubenswrapper[4840]: I1205 15:25:35.941377 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mwf7" event={"ID":"d8153f52-1255-4593-8136-083c0a618d49","Type":"ContainerDied","Data":"6ea2a92fa9d36ebfa832354d473108d8ed9a42ddcf257e62f320c5cadeee76f1"} Dec 05 15:25:36 crc kubenswrapper[4840]: I1205 15:25:36.952582 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mwf7" event={"ID":"d8153f52-1255-4593-8136-083c0a618d49","Type":"ContainerStarted","Data":"91ef7ac3d62fab55139f58cafded33094094f3d05703e40dbc7c331740f71feb"} Dec 05 15:25:36 crc kubenswrapper[4840]: I1205 15:25:36.978649 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7mwf7" podStartSLOduration=2.515195602 podStartE2EDuration="9.97862421s" podCreationTimestamp="2025-12-05 15:25:27 +0000 UTC" firstStartedPulling="2025-12-05 15:25:28.86211498 +0000 UTC m=+1607.203177614" 
lastFinishedPulling="2025-12-05 15:25:36.325543608 +0000 UTC m=+1614.666606222" observedRunningTime="2025-12-05 15:25:36.968720431 +0000 UTC m=+1615.309783045" watchObservedRunningTime="2025-12-05 15:25:36.97862421 +0000 UTC m=+1615.319686824" Dec 05 15:25:37 crc kubenswrapper[4840]: I1205 15:25:37.951067 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7mwf7" Dec 05 15:25:37 crc kubenswrapper[4840]: I1205 15:25:37.951436 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7mwf7" Dec 05 15:25:38 crc kubenswrapper[4840]: I1205 15:25:38.007056 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-7mwf7" Dec 05 15:25:46 crc kubenswrapper[4840]: I1205 15:25:46.067576 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:25:46 crc kubenswrapper[4840]: E1205 15:25:46.068306 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:25:48 crc kubenswrapper[4840]: I1205 15:25:48.016782 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7mwf7" Dec 05 15:25:48 crc kubenswrapper[4840]: I1205 15:25:48.084768 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7mwf7"] Dec 05 15:25:48 crc kubenswrapper[4840]: I1205 15:25:48.133030 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x6pn9"] Dec 05 15:25:48 crc kubenswrapper[4840]: I1205 15:25:48.133282 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-x6pn9" podUID="1d10a78b-740a-45ae-bb58-1d2802630d99" containerName="registry-server" containerID="cri-o://c619b935eb01018aa196da3fba59ac77fad5640f1e1b9e28a64eab087d817020" gracePeriod=2 Dec 05 15:25:48 crc kubenswrapper[4840]: I1205 15:25:48.585020 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-x6pn9" Dec 05 15:25:48 crc kubenswrapper[4840]: I1205 15:25:48.712133 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d10a78b-740a-45ae-bb58-1d2802630d99-utilities\") pod \"1d10a78b-740a-45ae-bb58-1d2802630d99\" (UID: \"1d10a78b-740a-45ae-bb58-1d2802630d99\") " Dec 05 15:25:48 crc kubenswrapper[4840]: I1205 15:25:48.712210 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d10a78b-740a-45ae-bb58-1d2802630d99-catalog-content\") pod \"1d10a78b-740a-45ae-bb58-1d2802630d99\" (UID: \"1d10a78b-740a-45ae-bb58-1d2802630d99\") " Dec 05 15:25:48 crc kubenswrapper[4840]: I1205 15:25:48.712293 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zx6c\" (UniqueName: \"kubernetes.io/projected/1d10a78b-740a-45ae-bb58-1d2802630d99-kube-api-access-2zx6c\") pod \"1d10a78b-740a-45ae-bb58-1d2802630d99\" (UID: \"1d10a78b-740a-45ae-bb58-1d2802630d99\") " Dec 05 15:25:48 crc kubenswrapper[4840]: I1205 15:25:48.713532 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d10a78b-740a-45ae-bb58-1d2802630d99-utilities" (OuterVolumeSpecName: "utilities") pod "1d10a78b-740a-45ae-bb58-1d2802630d99" (UID: "1d10a78b-740a-45ae-bb58-1d2802630d99"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:25:48 crc kubenswrapper[4840]: I1205 15:25:48.722025 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d10a78b-740a-45ae-bb58-1d2802630d99-kube-api-access-2zx6c" (OuterVolumeSpecName: "kube-api-access-2zx6c") pod "1d10a78b-740a-45ae-bb58-1d2802630d99" (UID: "1d10a78b-740a-45ae-bb58-1d2802630d99"). InnerVolumeSpecName "kube-api-access-2zx6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:25:48 crc kubenswrapper[4840]: I1205 15:25:48.762487 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d10a78b-740a-45ae-bb58-1d2802630d99-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d10a78b-740a-45ae-bb58-1d2802630d99" (UID: "1d10a78b-740a-45ae-bb58-1d2802630d99"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:25:48 crc kubenswrapper[4840]: I1205 15:25:48.814940 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zx6c\" (UniqueName: \"kubernetes.io/projected/1d10a78b-740a-45ae-bb58-1d2802630d99-kube-api-access-2zx6c\") on node \"crc\" DevicePath \"\"" Dec 05 15:25:48 crc kubenswrapper[4840]: I1205 15:25:48.814988 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d10a78b-740a-45ae-bb58-1d2802630d99-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 15:25:48 crc kubenswrapper[4840]: I1205 15:25:48.815000 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d10a78b-740a-45ae-bb58-1d2802630d99-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 15:25:49 crc kubenswrapper[4840]: I1205 15:25:49.073298 4840 generic.go:334] "Generic (PLEG): container finished" podID="1d10a78b-740a-45ae-bb58-1d2802630d99" containerID="c619b935eb01018aa196da3fba59ac77fad5640f1e1b9e28a64eab087d817020" exitCode=0 Dec 05 15:25:49 crc kubenswrapper[4840]: I1205 15:25:49.073359 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x6pn9" Dec 05 15:25:49 crc kubenswrapper[4840]: I1205 15:25:49.073360 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6pn9" event={"ID":"1d10a78b-740a-45ae-bb58-1d2802630d99","Type":"ContainerDied","Data":"c619b935eb01018aa196da3fba59ac77fad5640f1e1b9e28a64eab087d817020"} Dec 05 15:25:49 crc kubenswrapper[4840]: I1205 15:25:49.073414 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6pn9" event={"ID":"1d10a78b-740a-45ae-bb58-1d2802630d99","Type":"ContainerDied","Data":"cad8c8c3fb7f0b7a36733b0453e17c55692bc5207b361c25c3b2c7d401b166bc"} Dec 05 15:25:49 crc kubenswrapper[4840]: I1205 15:25:49.073437 4840 scope.go:117] "RemoveContainer" containerID="c619b935eb01018aa196da3fba59ac77fad5640f1e1b9e28a64eab087d817020" Dec 05 15:25:49 crc kubenswrapper[4840]: I1205 15:25:49.118691 4840 scope.go:117] "RemoveContainer" containerID="7dc1e68fb05e06b82d7e588a8aa3e6cf94b7020f829629cb18fd89d09d420dc6" Dec 05 15:25:49 crc kubenswrapper[4840]: I1205 15:25:49.147835 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x6pn9"] Dec 05 15:25:49 crc kubenswrapper[4840]: I1205 15:25:49.160246 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-x6pn9"] Dec 05 15:25:49 crc kubenswrapper[4840]: I1205 15:25:49.173078 4840 scope.go:117] "RemoveContainer" containerID="826c58663e5a467af8dbc89a696f4e649e9e7b4847bcccd592b4ea1b3ec67dfc" Dec 05 15:25:49 crc kubenswrapper[4840]: I1205 15:25:49.219765 4840 scope.go:117] "RemoveContainer" containerID="c619b935eb01018aa196da3fba59ac77fad5640f1e1b9e28a64eab087d817020" Dec 05 15:25:49 crc kubenswrapper[4840]: E1205 15:25:49.220269 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c619b935eb01018aa196da3fba59ac77fad5640f1e1b9e28a64eab087d817020\": container with ID starting with c619b935eb01018aa196da3fba59ac77fad5640f1e1b9e28a64eab087d817020 not found: ID does not exist" containerID="c619b935eb01018aa196da3fba59ac77fad5640f1e1b9e28a64eab087d817020" Dec 05 15:25:49 crc kubenswrapper[4840]: I1205 15:25:49.220309 
4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c619b935eb01018aa196da3fba59ac77fad5640f1e1b9e28a64eab087d817020"} err="failed to get container status \"c619b935eb01018aa196da3fba59ac77fad5640f1e1b9e28a64eab087d817020\": rpc error: code = NotFound desc = could not find container \"c619b935eb01018aa196da3fba59ac77fad5640f1e1b9e28a64eab087d817020\": container with ID starting with c619b935eb01018aa196da3fba59ac77fad5640f1e1b9e28a64eab087d817020 not found: ID does not exist" Dec 05 15:25:49 crc kubenswrapper[4840]: I1205 15:25:49.220336 4840 scope.go:117] "RemoveContainer" containerID="7dc1e68fb05e06b82d7e588a8aa3e6cf94b7020f829629cb18fd89d09d420dc6" Dec 05 15:25:49 crc kubenswrapper[4840]: E1205 15:25:49.220635 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7dc1e68fb05e06b82d7e588a8aa3e6cf94b7020f829629cb18fd89d09d420dc6\": container with ID starting with 7dc1e68fb05e06b82d7e588a8aa3e6cf94b7020f829629cb18fd89d09d420dc6 not found: ID does not exist" containerID="7dc1e68fb05e06b82d7e588a8aa3e6cf94b7020f829629cb18fd89d09d420dc6" Dec 05 15:25:49 crc kubenswrapper[4840]: I1205 15:25:49.220658 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7dc1e68fb05e06b82d7e588a8aa3e6cf94b7020f829629cb18fd89d09d420dc6"} err="failed to get container status \"7dc1e68fb05e06b82d7e588a8aa3e6cf94b7020f829629cb18fd89d09d420dc6\": rpc error: code = NotFound desc = could not find container \"7dc1e68fb05e06b82d7e588a8aa3e6cf94b7020f829629cb18fd89d09d420dc6\": container with ID starting with 7dc1e68fb05e06b82d7e588a8aa3e6cf94b7020f829629cb18fd89d09d420dc6 not found: ID does not exist" Dec 05 15:25:49 crc kubenswrapper[4840]: I1205 15:25:49.220671 4840 scope.go:117] "RemoveContainer" containerID="826c58663e5a467af8dbc89a696f4e649e9e7b4847bcccd592b4ea1b3ec67dfc" Dec 05 15:25:49 crc kubenswrapper[4840]: E1205 15:25:49.220951 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"826c58663e5a467af8dbc89a696f4e649e9e7b4847bcccd592b4ea1b3ec67dfc\": container with ID starting with 826c58663e5a467af8dbc89a696f4e649e9e7b4847bcccd592b4ea1b3ec67dfc not found: ID does not exist" containerID="826c58663e5a467af8dbc89a696f4e649e9e7b4847bcccd592b4ea1b3ec67dfc" Dec 05 15:25:49 crc kubenswrapper[4840]: I1205 15:25:49.220973 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"826c58663e5a467af8dbc89a696f4e649e9e7b4847bcccd592b4ea1b3ec67dfc"} err="failed to get container status \"826c58663e5a467af8dbc89a696f4e649e9e7b4847bcccd592b4ea1b3ec67dfc\": rpc error: code = NotFound desc = could not find container \"826c58663e5a467af8dbc89a696f4e649e9e7b4847bcccd592b4ea1b3ec67dfc\": container with ID starting with 826c58663e5a467af8dbc89a696f4e649e9e7b4847bcccd592b4ea1b3ec67dfc not found: ID does not exist" Dec 05 15:25:50 crc kubenswrapper[4840]: I1205 15:25:50.079799 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d10a78b-740a-45ae-bb58-1d2802630d99" path="/var/lib/kubelet/pods/1d10a78b-740a-45ae-bb58-1d2802630d99/volumes" Dec 05 15:25:58 crc kubenswrapper[4840]: I1205 15:25:58.195975 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5wbjh"] Dec 05 15:25:58 crc kubenswrapper[4840]: E1205 15:25:58.197388 4840 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="1d10a78b-740a-45ae-bb58-1d2802630d99" containerName="registry-server" Dec 05 15:25:58 crc kubenswrapper[4840]: I1205 15:25:58.197410 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d10a78b-740a-45ae-bb58-1d2802630d99" containerName="registry-server" Dec 05 15:25:58 crc kubenswrapper[4840]: E1205 15:25:58.197427 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d10a78b-740a-45ae-bb58-1d2802630d99" containerName="extract-utilities" Dec 05 15:25:58 crc kubenswrapper[4840]: I1205 15:25:58.197434 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d10a78b-740a-45ae-bb58-1d2802630d99" containerName="extract-utilities" Dec 05 15:25:58 crc kubenswrapper[4840]: E1205 15:25:58.197442 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d10a78b-740a-45ae-bb58-1d2802630d99" containerName="extract-content" Dec 05 15:25:58 crc kubenswrapper[4840]: I1205 15:25:58.197450 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d10a78b-740a-45ae-bb58-1d2802630d99" containerName="extract-content" Dec 05 15:25:58 crc kubenswrapper[4840]: I1205 15:25:58.197690 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d10a78b-740a-45ae-bb58-1d2802630d99" containerName="registry-server" Dec 05 15:25:58 crc kubenswrapper[4840]: I1205 15:25:58.199797 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5wbjh" Dec 05 15:25:58 crc kubenswrapper[4840]: I1205 15:25:58.222051 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5wbjh"] Dec 05 15:25:58 crc kubenswrapper[4840]: I1205 15:25:58.258635 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3764c586-77d9-4b89-b0f0-b4ef0043da49-catalog-content\") pod \"redhat-marketplace-5wbjh\" (UID: \"3764c586-77d9-4b89-b0f0-b4ef0043da49\") " pod="openshift-marketplace/redhat-marketplace-5wbjh" Dec 05 15:25:58 crc kubenswrapper[4840]: I1205 15:25:58.258718 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmg4b\" (UniqueName: \"kubernetes.io/projected/3764c586-77d9-4b89-b0f0-b4ef0043da49-kube-api-access-fmg4b\") pod \"redhat-marketplace-5wbjh\" (UID: \"3764c586-77d9-4b89-b0f0-b4ef0043da49\") " pod="openshift-marketplace/redhat-marketplace-5wbjh" Dec 05 15:25:58 crc kubenswrapper[4840]: I1205 15:25:58.258829 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3764c586-77d9-4b89-b0f0-b4ef0043da49-utilities\") pod \"redhat-marketplace-5wbjh\" (UID: \"3764c586-77d9-4b89-b0f0-b4ef0043da49\") " pod="openshift-marketplace/redhat-marketplace-5wbjh" Dec 05 15:25:58 crc kubenswrapper[4840]: I1205 15:25:58.360572 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmg4b\" (UniqueName: \"kubernetes.io/projected/3764c586-77d9-4b89-b0f0-b4ef0043da49-kube-api-access-fmg4b\") pod \"redhat-marketplace-5wbjh\" (UID: \"3764c586-77d9-4b89-b0f0-b4ef0043da49\") " pod="openshift-marketplace/redhat-marketplace-5wbjh" Dec 05 15:25:58 crc kubenswrapper[4840]: I1205 15:25:58.360686 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3764c586-77d9-4b89-b0f0-b4ef0043da49-utilities\") pod 
\"redhat-marketplace-5wbjh\" (UID: \"3764c586-77d9-4b89-b0f0-b4ef0043da49\") " pod="openshift-marketplace/redhat-marketplace-5wbjh" Dec 05 15:25:58 crc kubenswrapper[4840]: I1205 15:25:58.360844 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3764c586-77d9-4b89-b0f0-b4ef0043da49-catalog-content\") pod \"redhat-marketplace-5wbjh\" (UID: \"3764c586-77d9-4b89-b0f0-b4ef0043da49\") " pod="openshift-marketplace/redhat-marketplace-5wbjh" Dec 05 15:25:58 crc kubenswrapper[4840]: I1205 15:25:58.361488 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3764c586-77d9-4b89-b0f0-b4ef0043da49-catalog-content\") pod \"redhat-marketplace-5wbjh\" (UID: \"3764c586-77d9-4b89-b0f0-b4ef0043da49\") " pod="openshift-marketplace/redhat-marketplace-5wbjh" Dec 05 15:25:58 crc kubenswrapper[4840]: I1205 15:25:58.361558 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3764c586-77d9-4b89-b0f0-b4ef0043da49-utilities\") pod \"redhat-marketplace-5wbjh\" (UID: \"3764c586-77d9-4b89-b0f0-b4ef0043da49\") " pod="openshift-marketplace/redhat-marketplace-5wbjh" Dec 05 15:25:58 crc kubenswrapper[4840]: I1205 15:25:58.391958 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmg4b\" (UniqueName: \"kubernetes.io/projected/3764c586-77d9-4b89-b0f0-b4ef0043da49-kube-api-access-fmg4b\") pod \"redhat-marketplace-5wbjh\" (UID: \"3764c586-77d9-4b89-b0f0-b4ef0043da49\") " pod="openshift-marketplace/redhat-marketplace-5wbjh" Dec 05 15:25:58 crc kubenswrapper[4840]: I1205 15:25:58.539219 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5wbjh" Dec 05 15:25:59 crc kubenswrapper[4840]: I1205 15:25:59.025069 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5wbjh"] Dec 05 15:25:59 crc kubenswrapper[4840]: W1205 15:25:59.031848 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3764c586_77d9_4b89_b0f0_b4ef0043da49.slice/crio-ac99ef294894a477c3fb89ca86968aeb98b7f3ac9de09c4e8efa53358eb75c80 WatchSource:0}: Error finding container ac99ef294894a477c3fb89ca86968aeb98b7f3ac9de09c4e8efa53358eb75c80: Status 404 returned error can't find the container with id ac99ef294894a477c3fb89ca86968aeb98b7f3ac9de09c4e8efa53358eb75c80 Dec 05 15:25:59 crc kubenswrapper[4840]: I1205 15:25:59.213030 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5wbjh" event={"ID":"3764c586-77d9-4b89-b0f0-b4ef0043da49","Type":"ContainerStarted","Data":"ac99ef294894a477c3fb89ca86968aeb98b7f3ac9de09c4e8efa53358eb75c80"} Dec 05 15:26:00 crc kubenswrapper[4840]: I1205 15:26:00.066910 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:26:00 crc kubenswrapper[4840]: E1205 15:26:00.067832 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:26:00 crc kubenswrapper[4840]: I1205 15:26:00.225202 4840 generic.go:334] "Generic (PLEG): container finished" podID="3764c586-77d9-4b89-b0f0-b4ef0043da49" containerID="ee0ee565223292153d92a872f299db93ace4b18ee76c0d1b0927930ae2a0a8a3" exitCode=0 Dec 05 15:26:00 crc kubenswrapper[4840]: I1205 15:26:00.225272 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5wbjh" event={"ID":"3764c586-77d9-4b89-b0f0-b4ef0043da49","Type":"ContainerDied","Data":"ee0ee565223292153d92a872f299db93ace4b18ee76c0d1b0927930ae2a0a8a3"} Dec 05 15:26:01 crc kubenswrapper[4840]: I1205 15:26:01.235776 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5wbjh" event={"ID":"3764c586-77d9-4b89-b0f0-b4ef0043da49","Type":"ContainerStarted","Data":"3cc6ddd20babf90fced50aab808a21a760d2e7c005a1e5e5c7cdae949259aab6"} Dec 05 15:26:02 crc kubenswrapper[4840]: I1205 15:26:02.245545 4840 generic.go:334] "Generic (PLEG): container finished" podID="3764c586-77d9-4b89-b0f0-b4ef0043da49" containerID="3cc6ddd20babf90fced50aab808a21a760d2e7c005a1e5e5c7cdae949259aab6" exitCode=0 Dec 05 15:26:02 crc kubenswrapper[4840]: I1205 15:26:02.245642 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5wbjh" event={"ID":"3764c586-77d9-4b89-b0f0-b4ef0043da49","Type":"ContainerDied","Data":"3cc6ddd20babf90fced50aab808a21a760d2e7c005a1e5e5c7cdae949259aab6"} Dec 05 15:26:03 crc kubenswrapper[4840]: I1205 15:26:03.260556 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5wbjh" 
event={"ID":"3764c586-77d9-4b89-b0f0-b4ef0043da49","Type":"ContainerStarted","Data":"791b25a59ce837afb48255cf9ac9562f70f4ca2caccee7c0df05a2a8de602d7c"} Dec 05 15:26:03 crc kubenswrapper[4840]: I1205 15:26:03.291044 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5wbjh" podStartSLOduration=2.879141147 podStartE2EDuration="5.291022054s" podCreationTimestamp="2025-12-05 15:25:58 +0000 UTC" firstStartedPulling="2025-12-05 15:26:00.227659759 +0000 UTC m=+1638.568722383" lastFinishedPulling="2025-12-05 15:26:02.639540676 +0000 UTC m=+1640.980603290" observedRunningTime="2025-12-05 15:26:03.283483651 +0000 UTC m=+1641.624546275" watchObservedRunningTime="2025-12-05 15:26:03.291022054 +0000 UTC m=+1641.632084668" Dec 05 15:26:08 crc kubenswrapper[4840]: I1205 15:26:08.539857 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5wbjh" Dec 05 15:26:08 crc kubenswrapper[4840]: I1205 15:26:08.541020 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5wbjh" Dec 05 15:26:08 crc kubenswrapper[4840]: I1205 15:26:08.607851 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5wbjh" Dec 05 15:26:09 crc kubenswrapper[4840]: I1205 15:26:09.372370 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5wbjh" Dec 05 15:26:09 crc kubenswrapper[4840]: I1205 15:26:09.426733 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5wbjh"] Dec 05 15:26:11 crc kubenswrapper[4840]: I1205 15:26:11.067084 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:26:11 crc kubenswrapper[4840]: E1205 15:26:11.067531 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:26:11 crc kubenswrapper[4840]: I1205 15:26:11.368553 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5wbjh" podUID="3764c586-77d9-4b89-b0f0-b4ef0043da49" containerName="registry-server" containerID="cri-o://791b25a59ce837afb48255cf9ac9562f70f4ca2caccee7c0df05a2a8de602d7c" gracePeriod=2 Dec 05 15:26:11 crc kubenswrapper[4840]: I1205 15:26:11.897707 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5wbjh" Dec 05 15:26:12 crc kubenswrapper[4840]: I1205 15:26:12.048989 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3764c586-77d9-4b89-b0f0-b4ef0043da49-utilities\") pod \"3764c586-77d9-4b89-b0f0-b4ef0043da49\" (UID: \"3764c586-77d9-4b89-b0f0-b4ef0043da49\") " Dec 05 15:26:12 crc kubenswrapper[4840]: I1205 15:26:12.049129 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmg4b\" (UniqueName: \"kubernetes.io/projected/3764c586-77d9-4b89-b0f0-b4ef0043da49-kube-api-access-fmg4b\") pod \"3764c586-77d9-4b89-b0f0-b4ef0043da49\" (UID: \"3764c586-77d9-4b89-b0f0-b4ef0043da49\") " Dec 05 15:26:12 crc kubenswrapper[4840]: I1205 15:26:12.049172 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3764c586-77d9-4b89-b0f0-b4ef0043da49-catalog-content\") pod \"3764c586-77d9-4b89-b0f0-b4ef0043da49\" (UID: \"3764c586-77d9-4b89-b0f0-b4ef0043da49\") " Dec 05 15:26:12 crc kubenswrapper[4840]: I1205 15:26:12.050177 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3764c586-77d9-4b89-b0f0-b4ef0043da49-utilities" (OuterVolumeSpecName: "utilities") pod "3764c586-77d9-4b89-b0f0-b4ef0043da49" (UID: "3764c586-77d9-4b89-b0f0-b4ef0043da49"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:26:12 crc kubenswrapper[4840]: I1205 15:26:12.057193 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3764c586-77d9-4b89-b0f0-b4ef0043da49-kube-api-access-fmg4b" (OuterVolumeSpecName: "kube-api-access-fmg4b") pod "3764c586-77d9-4b89-b0f0-b4ef0043da49" (UID: "3764c586-77d9-4b89-b0f0-b4ef0043da49"). InnerVolumeSpecName "kube-api-access-fmg4b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:26:12 crc kubenswrapper[4840]: I1205 15:26:12.072301 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3764c586-77d9-4b89-b0f0-b4ef0043da49-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3764c586-77d9-4b89-b0f0-b4ef0043da49" (UID: "3764c586-77d9-4b89-b0f0-b4ef0043da49"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:26:12 crc kubenswrapper[4840]: I1205 15:26:12.152485 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3764c586-77d9-4b89-b0f0-b4ef0043da49-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 15:26:12 crc kubenswrapper[4840]: I1205 15:26:12.152558 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmg4b\" (UniqueName: \"kubernetes.io/projected/3764c586-77d9-4b89-b0f0-b4ef0043da49-kube-api-access-fmg4b\") on node \"crc\" DevicePath \"\"" Dec 05 15:26:12 crc kubenswrapper[4840]: I1205 15:26:12.152647 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3764c586-77d9-4b89-b0f0-b4ef0043da49-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 15:26:12 crc kubenswrapper[4840]: I1205 15:26:12.379397 4840 generic.go:334] "Generic (PLEG): container finished" podID="3764c586-77d9-4b89-b0f0-b4ef0043da49" containerID="791b25a59ce837afb48255cf9ac9562f70f4ca2caccee7c0df05a2a8de602d7c" exitCode=0 Dec 05 15:26:12 crc kubenswrapper[4840]: I1205 15:26:12.379443 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5wbjh" event={"ID":"3764c586-77d9-4b89-b0f0-b4ef0043da49","Type":"ContainerDied","Data":"791b25a59ce837afb48255cf9ac9562f70f4ca2caccee7c0df05a2a8de602d7c"} Dec 05 15:26:12 crc kubenswrapper[4840]: I1205 15:26:12.379476 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5wbjh" event={"ID":"3764c586-77d9-4b89-b0f0-b4ef0043da49","Type":"ContainerDied","Data":"ac99ef294894a477c3fb89ca86968aeb98b7f3ac9de09c4e8efa53358eb75c80"} Dec 05 15:26:12 crc kubenswrapper[4840]: I1205 15:26:12.379476 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5wbjh" Dec 05 15:26:12 crc kubenswrapper[4840]: I1205 15:26:12.379494 4840 scope.go:117] "RemoveContainer" containerID="791b25a59ce837afb48255cf9ac9562f70f4ca2caccee7c0df05a2a8de602d7c" Dec 05 15:26:12 crc kubenswrapper[4840]: I1205 15:26:12.406090 4840 scope.go:117] "RemoveContainer" containerID="3cc6ddd20babf90fced50aab808a21a760d2e7c005a1e5e5c7cdae949259aab6" Dec 05 15:26:12 crc kubenswrapper[4840]: I1205 15:26:12.408772 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5wbjh"] Dec 05 15:26:12 crc kubenswrapper[4840]: I1205 15:26:12.417523 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5wbjh"] Dec 05 15:26:12 crc kubenswrapper[4840]: I1205 15:26:12.429330 4840 scope.go:117] "RemoveContainer" containerID="ee0ee565223292153d92a872f299db93ace4b18ee76c0d1b0927930ae2a0a8a3" Dec 05 15:26:12 crc kubenswrapper[4840]: I1205 15:26:12.475639 4840 scope.go:117] "RemoveContainer" containerID="791b25a59ce837afb48255cf9ac9562f70f4ca2caccee7c0df05a2a8de602d7c" Dec 05 15:26:12 crc kubenswrapper[4840]: E1205 15:26:12.476301 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"791b25a59ce837afb48255cf9ac9562f70f4ca2caccee7c0df05a2a8de602d7c\": container with ID starting with 791b25a59ce837afb48255cf9ac9562f70f4ca2caccee7c0df05a2a8de602d7c not found: ID does not exist" containerID="791b25a59ce837afb48255cf9ac9562f70f4ca2caccee7c0df05a2a8de602d7c" Dec 05 15:26:12 crc kubenswrapper[4840]: I1205 15:26:12.476335 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"791b25a59ce837afb48255cf9ac9562f70f4ca2caccee7c0df05a2a8de602d7c"} err="failed to get container status \"791b25a59ce837afb48255cf9ac9562f70f4ca2caccee7c0df05a2a8de602d7c\": rpc error: code = NotFound desc = could not find container \"791b25a59ce837afb48255cf9ac9562f70f4ca2caccee7c0df05a2a8de602d7c\": container with ID starting with 791b25a59ce837afb48255cf9ac9562f70f4ca2caccee7c0df05a2a8de602d7c not found: ID does not exist" Dec 05 15:26:12 crc kubenswrapper[4840]: I1205 15:26:12.476357 4840 scope.go:117] "RemoveContainer" containerID="3cc6ddd20babf90fced50aab808a21a760d2e7c005a1e5e5c7cdae949259aab6" Dec 05 15:26:12 crc kubenswrapper[4840]: E1205 15:26:12.477557 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3cc6ddd20babf90fced50aab808a21a760d2e7c005a1e5e5c7cdae949259aab6\": container with ID starting with 3cc6ddd20babf90fced50aab808a21a760d2e7c005a1e5e5c7cdae949259aab6 not found: ID does not exist" containerID="3cc6ddd20babf90fced50aab808a21a760d2e7c005a1e5e5c7cdae949259aab6" Dec 05 15:26:12 crc kubenswrapper[4840]: I1205 15:26:12.477613 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cc6ddd20babf90fced50aab808a21a760d2e7c005a1e5e5c7cdae949259aab6"} err="failed to get container status \"3cc6ddd20babf90fced50aab808a21a760d2e7c005a1e5e5c7cdae949259aab6\": rpc error: code = NotFound desc = could not find container \"3cc6ddd20babf90fced50aab808a21a760d2e7c005a1e5e5c7cdae949259aab6\": container with ID starting with 3cc6ddd20babf90fced50aab808a21a760d2e7c005a1e5e5c7cdae949259aab6 not found: ID does not exist" Dec 05 15:26:12 crc kubenswrapper[4840]: I1205 15:26:12.477650 4840 scope.go:117] "RemoveContainer" 
containerID="ee0ee565223292153d92a872f299db93ace4b18ee76c0d1b0927930ae2a0a8a3" Dec 05 15:26:12 crc kubenswrapper[4840]: E1205 15:26:12.478191 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee0ee565223292153d92a872f299db93ace4b18ee76c0d1b0927930ae2a0a8a3\": container with ID starting with ee0ee565223292153d92a872f299db93ace4b18ee76c0d1b0927930ae2a0a8a3 not found: ID does not exist" containerID="ee0ee565223292153d92a872f299db93ace4b18ee76c0d1b0927930ae2a0a8a3" Dec 05 15:26:12 crc kubenswrapper[4840]: I1205 15:26:12.478212 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee0ee565223292153d92a872f299db93ace4b18ee76c0d1b0927930ae2a0a8a3"} err="failed to get container status \"ee0ee565223292153d92a872f299db93ace4b18ee76c0d1b0927930ae2a0a8a3\": rpc error: code = NotFound desc = could not find container \"ee0ee565223292153d92a872f299db93ace4b18ee76c0d1b0927930ae2a0a8a3\": container with ID starting with ee0ee565223292153d92a872f299db93ace4b18ee76c0d1b0927930ae2a0a8a3 not found: ID does not exist" Dec 05 15:26:14 crc kubenswrapper[4840]: I1205 15:26:14.077739 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3764c586-77d9-4b89-b0f0-b4ef0043da49" path="/var/lib/kubelet/pods/3764c586-77d9-4b89-b0f0-b4ef0043da49/volumes" Dec 05 15:26:24 crc kubenswrapper[4840]: I1205 15:26:24.086519 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:26:24 crc kubenswrapper[4840]: E1205 15:26:24.087578 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:26:31 crc kubenswrapper[4840]: I1205 15:26:31.556134 4840 generic.go:334] "Generic (PLEG): container finished" podID="3298a054-72de-4060-95c4-ff42a8ed3a7f" containerID="50c4eaeba5581b9fba173f1170e40483ea861e7c5359b7a70ef6af8e5d415a7c" exitCode=0 Dec 05 15:26:31 crc kubenswrapper[4840]: I1205 15:26:31.556272 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt" event={"ID":"3298a054-72de-4060-95c4-ff42a8ed3a7f","Type":"ContainerDied","Data":"50c4eaeba5581b9fba173f1170e40483ea861e7c5359b7a70ef6af8e5d415a7c"} Dec 05 15:26:32 crc kubenswrapper[4840]: I1205 15:26:32.964658 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.157914 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3298a054-72de-4060-95c4-ff42a8ed3a7f-inventory\") pod \"3298a054-72de-4060-95c4-ff42a8ed3a7f\" (UID: \"3298a054-72de-4060-95c4-ff42a8ed3a7f\") " Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.158010 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3298a054-72de-4060-95c4-ff42a8ed3a7f-bootstrap-combined-ca-bundle\") pod \"3298a054-72de-4060-95c4-ff42a8ed3a7f\" (UID: \"3298a054-72de-4060-95c4-ff42a8ed3a7f\") " Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.158034 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g4b9m\" (UniqueName: \"kubernetes.io/projected/3298a054-72de-4060-95c4-ff42a8ed3a7f-kube-api-access-g4b9m\") pod \"3298a054-72de-4060-95c4-ff42a8ed3a7f\" (UID: \"3298a054-72de-4060-95c4-ff42a8ed3a7f\") " Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.158063 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3298a054-72de-4060-95c4-ff42a8ed3a7f-ssh-key\") pod \"3298a054-72de-4060-95c4-ff42a8ed3a7f\" (UID: \"3298a054-72de-4060-95c4-ff42a8ed3a7f\") " Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.163561 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3298a054-72de-4060-95c4-ff42a8ed3a7f-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "3298a054-72de-4060-95c4-ff42a8ed3a7f" (UID: "3298a054-72de-4060-95c4-ff42a8ed3a7f"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.168171 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3298a054-72de-4060-95c4-ff42a8ed3a7f-kube-api-access-g4b9m" (OuterVolumeSpecName: "kube-api-access-g4b9m") pod "3298a054-72de-4060-95c4-ff42a8ed3a7f" (UID: "3298a054-72de-4060-95c4-ff42a8ed3a7f"). InnerVolumeSpecName "kube-api-access-g4b9m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.186105 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3298a054-72de-4060-95c4-ff42a8ed3a7f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3298a054-72de-4060-95c4-ff42a8ed3a7f" (UID: "3298a054-72de-4060-95c4-ff42a8ed3a7f"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.187741 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3298a054-72de-4060-95c4-ff42a8ed3a7f-inventory" (OuterVolumeSpecName: "inventory") pod "3298a054-72de-4060-95c4-ff42a8ed3a7f" (UID: "3298a054-72de-4060-95c4-ff42a8ed3a7f"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.259775 4840 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3298a054-72de-4060-95c4-ff42a8ed3a7f-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.259817 4840 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3298a054-72de-4060-95c4-ff42a8ed3a7f-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.259829 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g4b9m\" (UniqueName: \"kubernetes.io/projected/3298a054-72de-4060-95c4-ff42a8ed3a7f-kube-api-access-g4b9m\") on node \"crc\" DevicePath \"\"" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.259838 4840 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3298a054-72de-4060-95c4-ff42a8ed3a7f-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.574647 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt" event={"ID":"3298a054-72de-4060-95c4-ff42a8ed3a7f","Type":"ContainerDied","Data":"6c715f4991d5a8129c2303094b13779b2b4ffaa8a7d28e40f74b7a48ee1b2523"} Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.574693 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c715f4991d5a8129c2303094b13779b2b4ffaa8a7d28e40f74b7a48ee1b2523" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.574755 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.657115 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tjftf"] Dec 05 15:26:33 crc kubenswrapper[4840]: E1205 15:26:33.657571 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3298a054-72de-4060-95c4-ff42a8ed3a7f" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.657592 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="3298a054-72de-4060-95c4-ff42a8ed3a7f" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 15:26:33 crc kubenswrapper[4840]: E1205 15:26:33.657611 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3764c586-77d9-4b89-b0f0-b4ef0043da49" containerName="extract-content" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.657619 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="3764c586-77d9-4b89-b0f0-b4ef0043da49" containerName="extract-content" Dec 05 15:26:33 crc kubenswrapper[4840]: E1205 15:26:33.657640 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3764c586-77d9-4b89-b0f0-b4ef0043da49" containerName="extract-utilities" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.657650 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="3764c586-77d9-4b89-b0f0-b4ef0043da49" containerName="extract-utilities" Dec 05 15:26:33 crc kubenswrapper[4840]: E1205 15:26:33.657660 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3764c586-77d9-4b89-b0f0-b4ef0043da49" containerName="registry-server" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.657669 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="3764c586-77d9-4b89-b0f0-b4ef0043da49" containerName="registry-server" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.657924 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="3298a054-72de-4060-95c4-ff42a8ed3a7f" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.657967 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="3764c586-77d9-4b89-b0f0-b4ef0043da49" containerName="registry-server" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.658804 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tjftf" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.666858 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-6c9x2" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.666974 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.667101 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.667215 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.669822 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51bf8de7-4ac8-4478-af9d-7b438f6afb1c-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tjftf\" (UID: \"51bf8de7-4ac8-4478-af9d-7b438f6afb1c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tjftf" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.670214 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jk7fz\" (UniqueName: \"kubernetes.io/projected/51bf8de7-4ac8-4478-af9d-7b438f6afb1c-kube-api-access-jk7fz\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tjftf\" (UID: \"51bf8de7-4ac8-4478-af9d-7b438f6afb1c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tjftf" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.670261 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51bf8de7-4ac8-4478-af9d-7b438f6afb1c-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tjftf\" (UID: \"51bf8de7-4ac8-4478-af9d-7b438f6afb1c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tjftf" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.670558 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tjftf"] Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.771637 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51bf8de7-4ac8-4478-af9d-7b438f6afb1c-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tjftf\" (UID: \"51bf8de7-4ac8-4478-af9d-7b438f6afb1c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tjftf" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.771809 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51bf8de7-4ac8-4478-af9d-7b438f6afb1c-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tjftf\" (UID: \"51bf8de7-4ac8-4478-af9d-7b438f6afb1c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tjftf" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.771848 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jk7fz\" (UniqueName: \"kubernetes.io/projected/51bf8de7-4ac8-4478-af9d-7b438f6afb1c-kube-api-access-jk7fz\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-tjftf\" (UID: \"51bf8de7-4ac8-4478-af9d-7b438f6afb1c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tjftf" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.775406 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51bf8de7-4ac8-4478-af9d-7b438f6afb1c-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tjftf\" (UID: \"51bf8de7-4ac8-4478-af9d-7b438f6afb1c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tjftf" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.780555 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51bf8de7-4ac8-4478-af9d-7b438f6afb1c-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tjftf\" (UID: \"51bf8de7-4ac8-4478-af9d-7b438f6afb1c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tjftf" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.795855 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jk7fz\" (UniqueName: \"kubernetes.io/projected/51bf8de7-4ac8-4478-af9d-7b438f6afb1c-kube-api-access-jk7fz\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tjftf\" (UID: \"51bf8de7-4ac8-4478-af9d-7b438f6afb1c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tjftf" Dec 05 15:26:33 crc kubenswrapper[4840]: I1205 15:26:33.978278 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tjftf" Dec 05 15:26:34 crc kubenswrapper[4840]: I1205 15:26:34.512312 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tjftf"] Dec 05 15:26:34 crc kubenswrapper[4840]: I1205 15:26:34.584360 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tjftf" event={"ID":"51bf8de7-4ac8-4478-af9d-7b438f6afb1c","Type":"ContainerStarted","Data":"f41762d037a63692d07c515c57da2f52565b5edc3aea073b5e046eb45242c979"} Dec 05 15:26:35 crc kubenswrapper[4840]: I1205 15:26:35.067461 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:26:35 crc kubenswrapper[4840]: E1205 15:26:35.068016 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:26:35 crc kubenswrapper[4840]: I1205 15:26:35.595696 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tjftf" event={"ID":"51bf8de7-4ac8-4478-af9d-7b438f6afb1c","Type":"ContainerStarted","Data":"7bac89fbfb3f7a9150cb0309ed56d3e9f22f3716d7cf585719477a8157dafe81"} Dec 05 15:26:35 crc kubenswrapper[4840]: I1205 15:26:35.621227 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tjftf" podStartSLOduration=2.12885777 podStartE2EDuration="2.621207302s" podCreationTimestamp="2025-12-05 
15:26:33 +0000 UTC" firstStartedPulling="2025-12-05 15:26:34.515620295 +0000 UTC m=+1672.856682909" lastFinishedPulling="2025-12-05 15:26:35.007969827 +0000 UTC m=+1673.349032441" observedRunningTime="2025-12-05 15:26:35.614108211 +0000 UTC m=+1673.955170825" watchObservedRunningTime="2025-12-05 15:26:35.621207302 +0000 UTC m=+1673.962269916" Dec 05 15:26:45 crc kubenswrapper[4840]: I1205 15:26:45.076982 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-251a-account-create-update-mcv4t"] Dec 05 15:26:45 crc kubenswrapper[4840]: I1205 15:26:45.085531 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-r67qk"] Dec 05 15:26:45 crc kubenswrapper[4840]: I1205 15:26:45.106983 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-5x7zs"] Dec 05 15:26:45 crc kubenswrapper[4840]: I1205 15:26:45.126061 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-aff3-account-create-update-lh6jh"] Dec 05 15:26:45 crc kubenswrapper[4840]: I1205 15:26:45.139079 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-r67qk"] Dec 05 15:26:45 crc kubenswrapper[4840]: I1205 15:26:45.147233 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-251a-account-create-update-mcv4t"] Dec 05 15:26:45 crc kubenswrapper[4840]: I1205 15:26:45.156446 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-aff3-account-create-update-lh6jh"] Dec 05 15:26:45 crc kubenswrapper[4840]: I1205 15:26:45.165082 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-5x7zs"] Dec 05 15:26:46 crc kubenswrapper[4840]: I1205 15:26:46.077342 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41783d81-4e56-4e4e-9335-7f978fb478c6" path="/var/lib/kubelet/pods/41783d81-4e56-4e4e-9335-7f978fb478c6/volumes" Dec 05 15:26:46 crc kubenswrapper[4840]: I1205 15:26:46.079079 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2f3441f-bb15-467f-b962-5c8b66b27cfe" path="/var/lib/kubelet/pods/c2f3441f-bb15-467f-b962-5c8b66b27cfe/volumes" Dec 05 15:26:46 crc kubenswrapper[4840]: I1205 15:26:46.080717 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4390a73-4bb5-42f6-a4dc-516f9657db60" path="/var/lib/kubelet/pods/e4390a73-4bb5-42f6-a4dc-516f9657db60/volumes" Dec 05 15:26:46 crc kubenswrapper[4840]: I1205 15:26:46.081959 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb1bdb27-1669-4d51-b689-4a48bd5f1567" path="/var/lib/kubelet/pods/eb1bdb27-1669-4d51-b689-4a48bd5f1567/volumes" Dec 05 15:26:48 crc kubenswrapper[4840]: I1205 15:26:48.067470 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:26:48 crc kubenswrapper[4840]: E1205 15:26:48.068198 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:26:51 crc kubenswrapper[4840]: I1205 15:26:51.102591 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-67a5-account-create-update-m5q8n"] Dec 05 
15:26:51 crc kubenswrapper[4840]: I1205 15:26:51.113896 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-bmclv"] Dec 05 15:26:51 crc kubenswrapper[4840]: I1205 15:26:51.123999 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-bmclv"] Dec 05 15:26:51 crc kubenswrapper[4840]: I1205 15:26:51.132382 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-67a5-account-create-update-m5q8n"] Dec 05 15:26:52 crc kubenswrapper[4840]: I1205 15:26:52.077815 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0" path="/var/lib/kubelet/pods/40044a4d-5ed3-4bb3-83f7-43b0dd3b56c0/volumes" Dec 05 15:26:52 crc kubenswrapper[4840]: I1205 15:26:52.078553 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81caf91c-1e3f-48ee-b953-85b6cb26c922" path="/var/lib/kubelet/pods/81caf91c-1e3f-48ee-b953-85b6cb26c922/volumes" Dec 05 15:27:00 crc kubenswrapper[4840]: I1205 15:27:00.067760 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:27:00 crc kubenswrapper[4840]: E1205 15:27:00.068692 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:27:11 crc kubenswrapper[4840]: I1205 15:27:11.066915 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:27:11 crc kubenswrapper[4840]: E1205 15:27:11.067694 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:27:16 crc kubenswrapper[4840]: I1205 15:27:16.729980 4840 scope.go:117] "RemoveContainer" containerID="f53a3fd1a8a3f346988b00fc0abb1c8bcf0851620c8f5dd94cb0067bc2baf7b7" Dec 05 15:27:16 crc kubenswrapper[4840]: I1205 15:27:16.765129 4840 scope.go:117] "RemoveContainer" containerID="a276c9c6ae36bf4ea30ff05e9af35c521da6368de8e9ff25ca285772363bb4b5" Dec 05 15:27:16 crc kubenswrapper[4840]: I1205 15:27:16.845755 4840 scope.go:117] "RemoveContainer" containerID="cd46a90e28eb02c64db85e8de9359cb7b8fbb098d84c8f5e39a5b452ac9f33f5" Dec 05 15:27:16 crc kubenswrapper[4840]: I1205 15:27:16.914359 4840 scope.go:117] "RemoveContainer" containerID="525858ebcd8bbff8d0b9efa77f7fa96de4f7addb5b4bf543b8febb2a6d75b6e9" Dec 05 15:27:16 crc kubenswrapper[4840]: I1205 15:27:16.954270 4840 scope.go:117] "RemoveContainer" containerID="b9ccaa47bbdee3a059eb000267ae680b212947bfda89c55bbeb303e6b8627db0" Dec 05 15:27:17 crc kubenswrapper[4840]: I1205 15:27:17.004185 4840 scope.go:117] "RemoveContainer" containerID="0f7213ca076b969b3fc05dc52b961da27bf62ba6f9355abc17636d312dadc0d6" Dec 05 15:27:17 crc kubenswrapper[4840]: I1205 15:27:17.049634 4840 scope.go:117] "RemoveContainer" 
containerID="36438ddad0aca0bd48c529f5d2d755a35b06a9e2cfc019aba33a8727f08b2aed" Dec 05 15:27:17 crc kubenswrapper[4840]: I1205 15:27:17.081643 4840 scope.go:117] "RemoveContainer" containerID="4653ae4f6ccf64e84418344bbeb5ac12d4c1c8bacf808246fcf600a23d41f91d" Dec 05 15:27:17 crc kubenswrapper[4840]: I1205 15:27:17.149454 4840 scope.go:117] "RemoveContainer" containerID="f2f41802d6f41dd2f191c6987238c7da7df6a159bf374fa300d59f7d07121acd" Dec 05 15:27:17 crc kubenswrapper[4840]: I1205 15:27:17.177979 4840 scope.go:117] "RemoveContainer" containerID="60f57e86b1c64d919067f5f25caceeb5f9bd970ab618ae418f090145fabe5eab" Dec 05 15:27:24 crc kubenswrapper[4840]: I1205 15:27:24.067105 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:27:24 crc kubenswrapper[4840]: E1205 15:27:24.068532 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:27:34 crc kubenswrapper[4840]: I1205 15:27:34.064062 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-649b-account-create-update-dj8zx"] Dec 05 15:27:34 crc kubenswrapper[4840]: I1205 15:27:34.081352 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-e3a9-account-create-update-fv9lq"] Dec 05 15:27:34 crc kubenswrapper[4840]: I1205 15:27:34.082745 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-kjhz7"] Dec 05 15:27:34 crc kubenswrapper[4840]: I1205 15:27:34.090854 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-nvknm"] Dec 05 15:27:34 crc kubenswrapper[4840]: I1205 15:27:34.098625 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-knjqv"] Dec 05 15:27:34 crc kubenswrapper[4840]: I1205 15:27:34.106507 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-knjqv"] Dec 05 15:27:34 crc kubenswrapper[4840]: I1205 15:27:34.113830 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-c263-account-create-update-28ql2"] Dec 05 15:27:34 crc kubenswrapper[4840]: I1205 15:27:34.120736 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-e3a9-account-create-update-fv9lq"] Dec 05 15:27:34 crc kubenswrapper[4840]: I1205 15:27:34.127578 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-649b-account-create-update-dj8zx"] Dec 05 15:27:34 crc kubenswrapper[4840]: I1205 15:27:34.135097 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-c263-account-create-update-28ql2"] Dec 05 15:27:34 crc kubenswrapper[4840]: I1205 15:27:34.142725 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-nvknm"] Dec 05 15:27:34 crc kubenswrapper[4840]: I1205 15:27:34.151229 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-kjhz7"] Dec 05 15:27:36 crc kubenswrapper[4840]: I1205 15:27:36.037306 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-nc7h9"] Dec 05 15:27:36 crc kubenswrapper[4840]: I1205 15:27:36.047783 4840 kubelet.go:2431] "SyncLoop 
REMOVE" source="api" pods=["openstack/glance-db-sync-nc7h9"] Dec 05 15:27:36 crc kubenswrapper[4840]: I1205 15:27:36.079603 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6240b61e-5c9f-4590-9b17-c0faae5242cb" path="/var/lib/kubelet/pods/6240b61e-5c9f-4590-9b17-c0faae5242cb/volumes" Dec 05 15:27:36 crc kubenswrapper[4840]: I1205 15:27:36.081186 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8" path="/var/lib/kubelet/pods/6bd7ae20-6c8e-4ac4-ad2c-e5ee31c836c8/volumes" Dec 05 15:27:36 crc kubenswrapper[4840]: I1205 15:27:36.082569 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="700cf304-d742-425e-9daf-f56b05297d38" path="/var/lib/kubelet/pods/700cf304-d742-425e-9daf-f56b05297d38/volumes" Dec 05 15:27:36 crc kubenswrapper[4840]: I1205 15:27:36.083787 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b380884-f228-426c-bf7c-ab261af14b51" path="/var/lib/kubelet/pods/8b380884-f228-426c-bf7c-ab261af14b51/volumes" Dec 05 15:27:36 crc kubenswrapper[4840]: I1205 15:27:36.085706 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1360652-5788-4fce-9395-312b4e57b7f3" path="/var/lib/kubelet/pods/c1360652-5788-4fce-9395-312b4e57b7f3/volumes" Dec 05 15:27:36 crc kubenswrapper[4840]: I1205 15:27:36.086901 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1c6cb4f-2b81-41ef-8d84-a207325aa9b8" path="/var/lib/kubelet/pods/e1c6cb4f-2b81-41ef-8d84-a207325aa9b8/volumes" Dec 05 15:27:36 crc kubenswrapper[4840]: I1205 15:27:36.087844 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e87241c9-95a1-4890-b5c7-0d4a68d1910f" path="/var/lib/kubelet/pods/e87241c9-95a1-4890-b5c7-0d4a68d1910f/volumes" Dec 05 15:27:39 crc kubenswrapper[4840]: I1205 15:27:39.053092 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-k7szk"] Dec 05 15:27:39 crc kubenswrapper[4840]: I1205 15:27:39.068992 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-k7szk"] Dec 05 15:27:39 crc kubenswrapper[4840]: I1205 15:27:39.070036 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:27:39 crc kubenswrapper[4840]: E1205 15:27:39.070470 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:27:40 crc kubenswrapper[4840]: I1205 15:27:40.078892 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="914b1f5a-b458-459d-ac76-d06ffd0ef611" path="/var/lib/kubelet/pods/914b1f5a-b458-459d-ac76-d06ffd0ef611/volumes" Dec 05 15:27:51 crc kubenswrapper[4840]: I1205 15:27:51.068186 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:27:51 crc kubenswrapper[4840]: E1205 15:27:51.072674 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:28:05 crc kubenswrapper[4840]: I1205 15:28:05.067624 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:28:05 crc kubenswrapper[4840]: E1205 15:28:05.068521 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:28:15 crc kubenswrapper[4840]: I1205 15:28:15.052704 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-4n4lf"] Dec 05 15:28:15 crc kubenswrapper[4840]: I1205 15:28:15.062013 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-4n4lf"] Dec 05 15:28:16 crc kubenswrapper[4840]: I1205 15:28:16.083518 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="509c87f8-fee0-4a27-ad42-91629218a636" path="/var/lib/kubelet/pods/509c87f8-fee0-4a27-ad42-91629218a636/volumes" Dec 05 15:28:17 crc kubenswrapper[4840]: I1205 15:28:17.067117 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:28:17 crc kubenswrapper[4840]: E1205 15:28:17.067652 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:28:17 crc kubenswrapper[4840]: I1205 15:28:17.320738 4840 scope.go:117] "RemoveContainer" containerID="fb5d662d89f08d0c7320fe93d01a371cb676016cedc3b4e984e7fbd6bbd2a1ef" Dec 05 15:28:17 crc kubenswrapper[4840]: I1205 15:28:17.367814 4840 scope.go:117] "RemoveContainer" containerID="684399bcb7fea862bc4a0711b7c4613ea610382c95138fb3587ae78399ad1ffa" Dec 05 15:28:17 crc kubenswrapper[4840]: I1205 15:28:17.414112 4840 scope.go:117] "RemoveContainer" containerID="878c61df4934b3bdaa31d3473faa6c866cd238ba99a2bba9f8c36d6ab0153e9e" Dec 05 15:28:17 crc kubenswrapper[4840]: I1205 15:28:17.465387 4840 scope.go:117] "RemoveContainer" containerID="1e063e58e8056843d23d3cce09fd405c30b78db53acd91021026019c84d940a6" Dec 05 15:28:17 crc kubenswrapper[4840]: I1205 15:28:17.525694 4840 scope.go:117] "RemoveContainer" containerID="2d3968b6397659a06ad6cae41d1ed9c5f662412c481c100caaf34ff0a72a6929" Dec 05 15:28:17 crc kubenswrapper[4840]: I1205 15:28:17.545469 4840 scope.go:117] "RemoveContainer" containerID="9af18c55e60b9074cfb6f27216d86ce1312856a93f46d72949a87af7aac3d771" Dec 05 15:28:17 crc kubenswrapper[4840]: I1205 15:28:17.586164 4840 scope.go:117] "RemoveContainer" containerID="c1f5be28a867337c667fb0b5f8bbb1b69cf7036f3d77cfe48acedc9847b1e37b" Dec 05 15:28:17 crc kubenswrapper[4840]: I1205 15:28:17.622674 4840 scope.go:117] "RemoveContainer" 
containerID="2514754c331203c53bd291a5da392f1726ba8e83e03150eeb5d5337a50797a3d" Dec 05 15:28:17 crc kubenswrapper[4840]: I1205 15:28:17.641245 4840 scope.go:117] "RemoveContainer" containerID="eae57e6c8c36c1fcb90f1e64dc9c58f1b0f35014599ff0c1c353296ffbe56b77" Dec 05 15:28:22 crc kubenswrapper[4840]: I1205 15:28:22.721303 4840 generic.go:334] "Generic (PLEG): container finished" podID="51bf8de7-4ac8-4478-af9d-7b438f6afb1c" containerID="7bac89fbfb3f7a9150cb0309ed56d3e9f22f3716d7cf585719477a8157dafe81" exitCode=0 Dec 05 15:28:22 crc kubenswrapper[4840]: I1205 15:28:22.721375 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tjftf" event={"ID":"51bf8de7-4ac8-4478-af9d-7b438f6afb1c","Type":"ContainerDied","Data":"7bac89fbfb3f7a9150cb0309ed56d3e9f22f3716d7cf585719477a8157dafe81"} Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.215957 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tjftf" Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.349405 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51bf8de7-4ac8-4478-af9d-7b438f6afb1c-ssh-key\") pod \"51bf8de7-4ac8-4478-af9d-7b438f6afb1c\" (UID: \"51bf8de7-4ac8-4478-af9d-7b438f6afb1c\") " Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.349597 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jk7fz\" (UniqueName: \"kubernetes.io/projected/51bf8de7-4ac8-4478-af9d-7b438f6afb1c-kube-api-access-jk7fz\") pod \"51bf8de7-4ac8-4478-af9d-7b438f6afb1c\" (UID: \"51bf8de7-4ac8-4478-af9d-7b438f6afb1c\") " Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.349683 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51bf8de7-4ac8-4478-af9d-7b438f6afb1c-inventory\") pod \"51bf8de7-4ac8-4478-af9d-7b438f6afb1c\" (UID: \"51bf8de7-4ac8-4478-af9d-7b438f6afb1c\") " Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.356478 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51bf8de7-4ac8-4478-af9d-7b438f6afb1c-kube-api-access-jk7fz" (OuterVolumeSpecName: "kube-api-access-jk7fz") pod "51bf8de7-4ac8-4478-af9d-7b438f6afb1c" (UID: "51bf8de7-4ac8-4478-af9d-7b438f6afb1c"). InnerVolumeSpecName "kube-api-access-jk7fz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.380361 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51bf8de7-4ac8-4478-af9d-7b438f6afb1c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "51bf8de7-4ac8-4478-af9d-7b438f6afb1c" (UID: "51bf8de7-4ac8-4478-af9d-7b438f6afb1c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.384129 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51bf8de7-4ac8-4478-af9d-7b438f6afb1c-inventory" (OuterVolumeSpecName: "inventory") pod "51bf8de7-4ac8-4478-af9d-7b438f6afb1c" (UID: "51bf8de7-4ac8-4478-af9d-7b438f6afb1c"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.452263 4840 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51bf8de7-4ac8-4478-af9d-7b438f6afb1c-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.452304 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jk7fz\" (UniqueName: \"kubernetes.io/projected/51bf8de7-4ac8-4478-af9d-7b438f6afb1c-kube-api-access-jk7fz\") on node \"crc\" DevicePath \"\"" Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.452322 4840 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51bf8de7-4ac8-4478-af9d-7b438f6afb1c-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.740583 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tjftf" event={"ID":"51bf8de7-4ac8-4478-af9d-7b438f6afb1c","Type":"ContainerDied","Data":"f41762d037a63692d07c515c57da2f52565b5edc3aea073b5e046eb45242c979"} Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.741201 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f41762d037a63692d07c515c57da2f52565b5edc3aea073b5e046eb45242c979" Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.740725 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tjftf" Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.830036 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jftg5"] Dec 05 15:28:24 crc kubenswrapper[4840]: E1205 15:28:24.830454 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51bf8de7-4ac8-4478-af9d-7b438f6afb1c" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.830471 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="51bf8de7-4ac8-4478-af9d-7b438f6afb1c" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.830699 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="51bf8de7-4ac8-4478-af9d-7b438f6afb1c" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.831441 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jftg5" Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.834177 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.834678 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.835223 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.835481 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-6c9x2" Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.848461 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jftg5"] Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.966625 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dc2ds\" (UniqueName: \"kubernetes.io/projected/997f1855-be81-4a43-94c8-2f6001a12c0d-kube-api-access-dc2ds\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jftg5\" (UID: \"997f1855-be81-4a43-94c8-2f6001a12c0d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jftg5" Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.966784 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/997f1855-be81-4a43-94c8-2f6001a12c0d-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jftg5\" (UID: \"997f1855-be81-4a43-94c8-2f6001a12c0d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jftg5" Dec 05 15:28:24 crc kubenswrapper[4840]: I1205 15:28:24.966808 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/997f1855-be81-4a43-94c8-2f6001a12c0d-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jftg5\" (UID: \"997f1855-be81-4a43-94c8-2f6001a12c0d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jftg5" Dec 05 15:28:25 crc kubenswrapper[4840]: I1205 15:28:25.068434 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dc2ds\" (UniqueName: \"kubernetes.io/projected/997f1855-be81-4a43-94c8-2f6001a12c0d-kube-api-access-dc2ds\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jftg5\" (UID: \"997f1855-be81-4a43-94c8-2f6001a12c0d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jftg5" Dec 05 15:28:25 crc kubenswrapper[4840]: I1205 15:28:25.068710 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/997f1855-be81-4a43-94c8-2f6001a12c0d-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jftg5\" (UID: \"997f1855-be81-4a43-94c8-2f6001a12c0d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jftg5" Dec 05 15:28:25 crc kubenswrapper[4840]: I1205 15:28:25.068748 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/997f1855-be81-4a43-94c8-2f6001a12c0d-inventory\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jftg5\" (UID: \"997f1855-be81-4a43-94c8-2f6001a12c0d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jftg5" Dec 05 15:28:25 crc kubenswrapper[4840]: I1205 15:28:25.075760 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/997f1855-be81-4a43-94c8-2f6001a12c0d-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jftg5\" (UID: \"997f1855-be81-4a43-94c8-2f6001a12c0d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jftg5" Dec 05 15:28:25 crc kubenswrapper[4840]: I1205 15:28:25.076978 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/997f1855-be81-4a43-94c8-2f6001a12c0d-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jftg5\" (UID: \"997f1855-be81-4a43-94c8-2f6001a12c0d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jftg5" Dec 05 15:28:25 crc kubenswrapper[4840]: I1205 15:28:25.097427 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dc2ds\" (UniqueName: \"kubernetes.io/projected/997f1855-be81-4a43-94c8-2f6001a12c0d-kube-api-access-dc2ds\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jftg5\" (UID: \"997f1855-be81-4a43-94c8-2f6001a12c0d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jftg5" Dec 05 15:28:25 crc kubenswrapper[4840]: I1205 15:28:25.153850 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jftg5" Dec 05 15:28:25 crc kubenswrapper[4840]: I1205 15:28:25.648431 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jftg5"] Dec 05 15:28:25 crc kubenswrapper[4840]: I1205 15:28:25.749988 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jftg5" event={"ID":"997f1855-be81-4a43-94c8-2f6001a12c0d","Type":"ContainerStarted","Data":"fdffafdb883f2e65ba1496211214dc64bf799ffa56ad0ff890431f9136eb4f1b"} Dec 05 15:28:26 crc kubenswrapper[4840]: I1205 15:28:26.059023 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-dfgrm"] Dec 05 15:28:26 crc kubenswrapper[4840]: I1205 15:28:26.085074 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-dfgrm"] Dec 05 15:28:26 crc kubenswrapper[4840]: I1205 15:28:26.090067 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-4bjv4"] Dec 05 15:28:26 crc kubenswrapper[4840]: I1205 15:28:26.100791 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-4bjv4"] Dec 05 15:28:26 crc kubenswrapper[4840]: I1205 15:28:26.757960 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jftg5" event={"ID":"997f1855-be81-4a43-94c8-2f6001a12c0d","Type":"ContainerStarted","Data":"5d202b39e1f16ffebb4e569aa7d47db8895283a4b4a76d8ce2965dd61a5072af"} Dec 05 15:28:26 crc kubenswrapper[4840]: I1205 15:28:26.876614 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jftg5" podStartSLOduration=2.441509214 podStartE2EDuration="2.876595394s" 
podCreationTimestamp="2025-12-05 15:28:24 +0000 UTC" firstStartedPulling="2025-12-05 15:28:25.656890896 +0000 UTC m=+1783.997953540" lastFinishedPulling="2025-12-05 15:28:26.091977116 +0000 UTC m=+1784.433039720" observedRunningTime="2025-12-05 15:28:26.773830573 +0000 UTC m=+1785.114893187" watchObservedRunningTime="2025-12-05 15:28:26.876595394 +0000 UTC m=+1785.217658028" Dec 05 15:28:28 crc kubenswrapper[4840]: I1205 15:28:28.068082 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:28:28 crc kubenswrapper[4840]: E1205 15:28:28.068636 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:28:28 crc kubenswrapper[4840]: I1205 15:28:28.078410 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705" path="/var/lib/kubelet/pods/1c43fa4f-ab9c-4ffb-8945-b7d8ac52a705/volumes" Dec 05 15:28:28 crc kubenswrapper[4840]: I1205 15:28:28.079646 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dab20e36-d38f-4c5f-9d42-028c9df5ca51" path="/var/lib/kubelet/pods/dab20e36-d38f-4c5f-9d42-028c9df5ca51/volumes" Dec 05 15:28:36 crc kubenswrapper[4840]: I1205 15:28:36.035853 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-5ccg9"] Dec 05 15:28:36 crc kubenswrapper[4840]: I1205 15:28:36.048911 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-5ccg9"] Dec 05 15:28:36 crc kubenswrapper[4840]: I1205 15:28:36.077816 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5731646f-d8c6-4bfd-b815-3d68c244d801" path="/var/lib/kubelet/pods/5731646f-d8c6-4bfd-b815-3d68c244d801/volumes" Dec 05 15:28:39 crc kubenswrapper[4840]: I1205 15:28:39.038222 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-szwv8"] Dec 05 15:28:39 crc kubenswrapper[4840]: I1205 15:28:39.051789 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-szwv8"] Dec 05 15:28:40 crc kubenswrapper[4840]: I1205 15:28:40.067343 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:28:40 crc kubenswrapper[4840]: E1205 15:28:40.067793 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:28:40 crc kubenswrapper[4840]: I1205 15:28:40.087383 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae68e2b9-f51d-4486-952d-73c097fbaac4" path="/var/lib/kubelet/pods/ae68e2b9-f51d-4486-952d-73c097fbaac4/volumes" Dec 05 15:28:55 crc kubenswrapper[4840]: I1205 15:28:55.067032 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:28:55 crc 
kubenswrapper[4840]: E1205 15:28:55.068292 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:29:10 crc kubenswrapper[4840]: I1205 15:29:10.068146 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:29:10 crc kubenswrapper[4840]: E1205 15:29:10.069146 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:29:17 crc kubenswrapper[4840]: I1205 15:29:17.819492 4840 scope.go:117] "RemoveContainer" containerID="0b4a51fac2376cf863d44ecaf5e8beb125f8aaa08e41afa6db8327aa01728bdf" Dec 05 15:29:17 crc kubenswrapper[4840]: I1205 15:29:17.871319 4840 scope.go:117] "RemoveContainer" containerID="48529d1c7e0e70882af343ccb984e6ec4a3d9734cd0bcbdc9f334b455947d1a7" Dec 05 15:29:17 crc kubenswrapper[4840]: I1205 15:29:17.927228 4840 scope.go:117] "RemoveContainer" containerID="53024aebaef310348e49018d50f7757dfd8bc4eee143e1f7bdc94038d4eacdac" Dec 05 15:29:17 crc kubenswrapper[4840]: I1205 15:29:17.961492 4840 scope.go:117] "RemoveContainer" containerID="bc012b34148f49f693654bc7d62d6926573b82d7141cfc524069457314892e7f" Dec 05 15:29:22 crc kubenswrapper[4840]: I1205 15:29:22.047232 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-j458g"] Dec 05 15:29:22 crc kubenswrapper[4840]: I1205 15:29:22.060328 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-09f2-account-create-update-m5w9w"] Dec 05 15:29:22 crc kubenswrapper[4840]: I1205 15:29:22.079465 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-m2tqf"] Dec 05 15:29:22 crc kubenswrapper[4840]: I1205 15:29:22.084748 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-gtmvq"] Dec 05 15:29:22 crc kubenswrapper[4840]: I1205 15:29:22.092812 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-09f2-account-create-update-m5w9w"] Dec 05 15:29:22 crc kubenswrapper[4840]: I1205 15:29:22.099954 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-j458g"] Dec 05 15:29:22 crc kubenswrapper[4840]: I1205 15:29:22.106912 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-m2tqf"] Dec 05 15:29:22 crc kubenswrapper[4840]: I1205 15:29:22.114065 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-gtmvq"] Dec 05 15:29:23 crc kubenswrapper[4840]: I1205 15:29:23.039491 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-47ef-account-create-update-9v4gn"] Dec 05 15:29:23 crc kubenswrapper[4840]: I1205 15:29:23.053396 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-1cfe-account-create-update-g6pb6"] Dec 05 
15:29:23 crc kubenswrapper[4840]: I1205 15:29:23.062567 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-47ef-account-create-update-9v4gn"] Dec 05 15:29:23 crc kubenswrapper[4840]: I1205 15:29:23.071520 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:29:23 crc kubenswrapper[4840]: E1205 15:29:23.072348 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:29:23 crc kubenswrapper[4840]: I1205 15:29:23.079307 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-1cfe-account-create-update-g6pb6"] Dec 05 15:29:24 crc kubenswrapper[4840]: I1205 15:29:24.078714 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07957586-8bfc-4935-a092-2245ac49771a" path="/var/lib/kubelet/pods/07957586-8bfc-4935-a092-2245ac49771a/volumes" Dec 05 15:29:24 crc kubenswrapper[4840]: I1205 15:29:24.079757 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48ad6b4f-c38d-40f9-9348-1af570cb7f35" path="/var/lib/kubelet/pods/48ad6b4f-c38d-40f9-9348-1af570cb7f35/volumes" Dec 05 15:29:24 crc kubenswrapper[4840]: I1205 15:29:24.080465 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72d018e0-5ef5-45a1-914e-e733965b1089" path="/var/lib/kubelet/pods/72d018e0-5ef5-45a1-914e-e733965b1089/volumes" Dec 05 15:29:24 crc kubenswrapper[4840]: I1205 15:29:24.081084 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b6a2385-87c2-4077-a39a-440e4e3d3e51" path="/var/lib/kubelet/pods/8b6a2385-87c2-4077-a39a-440e4e3d3e51/volumes" Dec 05 15:29:24 crc kubenswrapper[4840]: I1205 15:29:24.082385 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1e31d97-cf03-4d2a-abd2-14cf20e60d90" path="/var/lib/kubelet/pods/b1e31d97-cf03-4d2a-abd2-14cf20e60d90/volumes" Dec 05 15:29:24 crc kubenswrapper[4840]: I1205 15:29:24.082960 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f53be4d9-c650-4344-8a6f-79258ea44a6c" path="/var/lib/kubelet/pods/f53be4d9-c650-4344-8a6f-79258ea44a6c/volumes" Dec 05 15:29:36 crc kubenswrapper[4840]: I1205 15:29:36.067196 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:29:36 crc kubenswrapper[4840]: E1205 15:29:36.068530 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:29:44 crc kubenswrapper[4840]: I1205 15:29:44.516065 4840 generic.go:334] "Generic (PLEG): container finished" podID="997f1855-be81-4a43-94c8-2f6001a12c0d" containerID="5d202b39e1f16ffebb4e569aa7d47db8895283a4b4a76d8ce2965dd61a5072af" exitCode=0 Dec 05 15:29:44 crc kubenswrapper[4840]: I1205 15:29:44.516159 4840 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jftg5" event={"ID":"997f1855-be81-4a43-94c8-2f6001a12c0d","Type":"ContainerDied","Data":"5d202b39e1f16ffebb4e569aa7d47db8895283a4b4a76d8ce2965dd61a5072af"} Dec 05 15:29:45 crc kubenswrapper[4840]: I1205 15:29:45.920420 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jftg5" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.044087 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dc2ds\" (UniqueName: \"kubernetes.io/projected/997f1855-be81-4a43-94c8-2f6001a12c0d-kube-api-access-dc2ds\") pod \"997f1855-be81-4a43-94c8-2f6001a12c0d\" (UID: \"997f1855-be81-4a43-94c8-2f6001a12c0d\") " Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.044205 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/997f1855-be81-4a43-94c8-2f6001a12c0d-inventory\") pod \"997f1855-be81-4a43-94c8-2f6001a12c0d\" (UID: \"997f1855-be81-4a43-94c8-2f6001a12c0d\") " Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.044293 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/997f1855-be81-4a43-94c8-2f6001a12c0d-ssh-key\") pod \"997f1855-be81-4a43-94c8-2f6001a12c0d\" (UID: \"997f1855-be81-4a43-94c8-2f6001a12c0d\") " Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.050522 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/997f1855-be81-4a43-94c8-2f6001a12c0d-kube-api-access-dc2ds" (OuterVolumeSpecName: "kube-api-access-dc2ds") pod "997f1855-be81-4a43-94c8-2f6001a12c0d" (UID: "997f1855-be81-4a43-94c8-2f6001a12c0d"). InnerVolumeSpecName "kube-api-access-dc2ds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.074160 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/997f1855-be81-4a43-94c8-2f6001a12c0d-inventory" (OuterVolumeSpecName: "inventory") pod "997f1855-be81-4a43-94c8-2f6001a12c0d" (UID: "997f1855-be81-4a43-94c8-2f6001a12c0d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.083234 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/997f1855-be81-4a43-94c8-2f6001a12c0d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "997f1855-be81-4a43-94c8-2f6001a12c0d" (UID: "997f1855-be81-4a43-94c8-2f6001a12c0d"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.147070 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dc2ds\" (UniqueName: \"kubernetes.io/projected/997f1855-be81-4a43-94c8-2f6001a12c0d-kube-api-access-dc2ds\") on node \"crc\" DevicePath \"\"" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.147105 4840 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/997f1855-be81-4a43-94c8-2f6001a12c0d-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.147115 4840 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/997f1855-be81-4a43-94c8-2f6001a12c0d-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.535368 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jftg5" event={"ID":"997f1855-be81-4a43-94c8-2f6001a12c0d","Type":"ContainerDied","Data":"fdffafdb883f2e65ba1496211214dc64bf799ffa56ad0ff890431f9136eb4f1b"} Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.535410 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fdffafdb883f2e65ba1496211214dc64bf799ffa56ad0ff890431f9136eb4f1b" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.535431 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jftg5" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.625411 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f"] Dec 05 15:29:46 crc kubenswrapper[4840]: E1205 15:29:46.626609 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="997f1855-be81-4a43-94c8-2f6001a12c0d" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.626648 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="997f1855-be81-4a43-94c8-2f6001a12c0d" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.626881 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="997f1855-be81-4a43-94c8-2f6001a12c0d" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.627742 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.629958 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.633512 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f"] Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.634490 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.634534 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-6c9x2" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.634533 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.655121 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d55f8919-f3d7-4080-9573-b92529c9ec9f-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f\" (UID: \"d55f8919-f3d7-4080-9573-b92529c9ec9f\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.655221 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d55f8919-f3d7-4080-9573-b92529c9ec9f-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f\" (UID: \"d55f8919-f3d7-4080-9573-b92529c9ec9f\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.655384 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4hdl\" (UniqueName: \"kubernetes.io/projected/d55f8919-f3d7-4080-9573-b92529c9ec9f-kube-api-access-c4hdl\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f\" (UID: \"d55f8919-f3d7-4080-9573-b92529c9ec9f\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.757551 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d55f8919-f3d7-4080-9573-b92529c9ec9f-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f\" (UID: \"d55f8919-f3d7-4080-9573-b92529c9ec9f\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.757646 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d55f8919-f3d7-4080-9573-b92529c9ec9f-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f\" (UID: \"d55f8919-f3d7-4080-9573-b92529c9ec9f\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.757746 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4hdl\" (UniqueName: \"kubernetes.io/projected/d55f8919-f3d7-4080-9573-b92529c9ec9f-kube-api-access-c4hdl\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f\" (UID: \"d55f8919-f3d7-4080-9573-b92529c9ec9f\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.761674 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d55f8919-f3d7-4080-9573-b92529c9ec9f-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f\" (UID: \"d55f8919-f3d7-4080-9573-b92529c9ec9f\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.762742 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d55f8919-f3d7-4080-9573-b92529c9ec9f-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f\" (UID: \"d55f8919-f3d7-4080-9573-b92529c9ec9f\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.786820 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4hdl\" (UniqueName: \"kubernetes.io/projected/d55f8919-f3d7-4080-9573-b92529c9ec9f-kube-api-access-c4hdl\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f\" (UID: \"d55f8919-f3d7-4080-9573-b92529c9ec9f\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f" Dec 05 15:29:46 crc kubenswrapper[4840]: I1205 15:29:46.945009 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f" Dec 05 15:29:47 crc kubenswrapper[4840]: I1205 15:29:47.498054 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f"] Dec 05 15:29:47 crc kubenswrapper[4840]: I1205 15:29:47.550241 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f" event={"ID":"d55f8919-f3d7-4080-9573-b92529c9ec9f","Type":"ContainerStarted","Data":"522ec87149b4b5673f14a4e92dc7b03f37c3d954c6d95866285c47ba517f143c"} Dec 05 15:29:48 crc kubenswrapper[4840]: I1205 15:29:48.067407 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:29:48 crc kubenswrapper[4840]: E1205 15:29:48.067940 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:29:48 crc kubenswrapper[4840]: I1205 15:29:48.561132 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f" event={"ID":"d55f8919-f3d7-4080-9573-b92529c9ec9f","Type":"ContainerStarted","Data":"88cd403c3eb8016d0bf7fc83baa1e38677c15414a192af41d7795386c4a61919"} Dec 05 15:29:48 crc kubenswrapper[4840]: I1205 15:29:48.587232 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f" podStartSLOduration=2.008298927 podStartE2EDuration="2.587200948s" 
podCreationTimestamp="2025-12-05 15:29:46 +0000 UTC" firstStartedPulling="2025-12-05 15:29:47.499487869 +0000 UTC m=+1865.840550483" lastFinishedPulling="2025-12-05 15:29:48.0783899 +0000 UTC m=+1866.419452504" observedRunningTime="2025-12-05 15:29:48.580525719 +0000 UTC m=+1866.921588343" watchObservedRunningTime="2025-12-05 15:29:48.587200948 +0000 UTC m=+1866.928263562" Dec 05 15:29:53 crc kubenswrapper[4840]: I1205 15:29:53.619150 4840 generic.go:334] "Generic (PLEG): container finished" podID="d55f8919-f3d7-4080-9573-b92529c9ec9f" containerID="88cd403c3eb8016d0bf7fc83baa1e38677c15414a192af41d7795386c4a61919" exitCode=0 Dec 05 15:29:53 crc kubenswrapper[4840]: I1205 15:29:53.619216 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f" event={"ID":"d55f8919-f3d7-4080-9573-b92529c9ec9f","Type":"ContainerDied","Data":"88cd403c3eb8016d0bf7fc83baa1e38677c15414a192af41d7795386c4a61919"} Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.039673 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.232528 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d55f8919-f3d7-4080-9573-b92529c9ec9f-ssh-key\") pod \"d55f8919-f3d7-4080-9573-b92529c9ec9f\" (UID: \"d55f8919-f3d7-4080-9573-b92529c9ec9f\") " Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.232932 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4hdl\" (UniqueName: \"kubernetes.io/projected/d55f8919-f3d7-4080-9573-b92529c9ec9f-kube-api-access-c4hdl\") pod \"d55f8919-f3d7-4080-9573-b92529c9ec9f\" (UID: \"d55f8919-f3d7-4080-9573-b92529c9ec9f\") " Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.233003 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d55f8919-f3d7-4080-9573-b92529c9ec9f-inventory\") pod \"d55f8919-f3d7-4080-9573-b92529c9ec9f\" (UID: \"d55f8919-f3d7-4080-9573-b92529c9ec9f\") " Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.243329 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d55f8919-f3d7-4080-9573-b92529c9ec9f-kube-api-access-c4hdl" (OuterVolumeSpecName: "kube-api-access-c4hdl") pod "d55f8919-f3d7-4080-9573-b92529c9ec9f" (UID: "d55f8919-f3d7-4080-9573-b92529c9ec9f"). InnerVolumeSpecName "kube-api-access-c4hdl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.268148 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d55f8919-f3d7-4080-9573-b92529c9ec9f-inventory" (OuterVolumeSpecName: "inventory") pod "d55f8919-f3d7-4080-9573-b92529c9ec9f" (UID: "d55f8919-f3d7-4080-9573-b92529c9ec9f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.268702 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d55f8919-f3d7-4080-9573-b92529c9ec9f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d55f8919-f3d7-4080-9573-b92529c9ec9f" (UID: "d55f8919-f3d7-4080-9573-b92529c9ec9f"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.334935 4840 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d55f8919-f3d7-4080-9573-b92529c9ec9f-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.334976 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4hdl\" (UniqueName: \"kubernetes.io/projected/d55f8919-f3d7-4080-9573-b92529c9ec9f-kube-api-access-c4hdl\") on node \"crc\" DevicePath \"\"" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.334991 4840 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d55f8919-f3d7-4080-9573-b92529c9ec9f-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.639511 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f" event={"ID":"d55f8919-f3d7-4080-9573-b92529c9ec9f","Type":"ContainerDied","Data":"522ec87149b4b5673f14a4e92dc7b03f37c3d954c6d95866285c47ba517f143c"} Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.639561 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="522ec87149b4b5673f14a4e92dc7b03f37c3d954c6d95866285c47ba517f143c" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.639578 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.709343 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-t788v"] Dec 05 15:29:55 crc kubenswrapper[4840]: E1205 15:29:55.709733 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d55f8919-f3d7-4080-9573-b92529c9ec9f" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.709752 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="d55f8919-f3d7-4080-9573-b92529c9ec9f" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.709977 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="d55f8919-f3d7-4080-9573-b92529c9ec9f" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.710591 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t788v" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.715714 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.715887 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.716063 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.716575 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-6c9x2" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.727167 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-t788v"] Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.741850 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/30a7c1b0-8c3f-48e7-be82-bc57e708cd5e-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t788v\" (UID: \"30a7c1b0-8c3f-48e7-be82-bc57e708cd5e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t788v" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.741944 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bhsr\" (UniqueName: \"kubernetes.io/projected/30a7c1b0-8c3f-48e7-be82-bc57e708cd5e-kube-api-access-6bhsr\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t788v\" (UID: \"30a7c1b0-8c3f-48e7-be82-bc57e708cd5e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t788v" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.742176 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/30a7c1b0-8c3f-48e7-be82-bc57e708cd5e-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t788v\" (UID: \"30a7c1b0-8c3f-48e7-be82-bc57e708cd5e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t788v" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.843850 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/30a7c1b0-8c3f-48e7-be82-bc57e708cd5e-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t788v\" (UID: \"30a7c1b0-8c3f-48e7-be82-bc57e708cd5e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t788v" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.844001 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bhsr\" (UniqueName: \"kubernetes.io/projected/30a7c1b0-8c3f-48e7-be82-bc57e708cd5e-kube-api-access-6bhsr\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t788v\" (UID: \"30a7c1b0-8c3f-48e7-be82-bc57e708cd5e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t788v" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.844141 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/30a7c1b0-8c3f-48e7-be82-bc57e708cd5e-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t788v\" (UID: 
\"30a7c1b0-8c3f-48e7-be82-bc57e708cd5e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t788v" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.852586 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/30a7c1b0-8c3f-48e7-be82-bc57e708cd5e-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t788v\" (UID: \"30a7c1b0-8c3f-48e7-be82-bc57e708cd5e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t788v" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.852687 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/30a7c1b0-8c3f-48e7-be82-bc57e708cd5e-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t788v\" (UID: \"30a7c1b0-8c3f-48e7-be82-bc57e708cd5e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t788v" Dec 05 15:29:55 crc kubenswrapper[4840]: I1205 15:29:55.862859 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bhsr\" (UniqueName: \"kubernetes.io/projected/30a7c1b0-8c3f-48e7-be82-bc57e708cd5e-kube-api-access-6bhsr\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t788v\" (UID: \"30a7c1b0-8c3f-48e7-be82-bc57e708cd5e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t788v" Dec 05 15:29:56 crc kubenswrapper[4840]: I1205 15:29:56.049401 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t788v" Dec 05 15:29:56 crc kubenswrapper[4840]: I1205 15:29:56.565503 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-t788v"] Dec 05 15:29:56 crc kubenswrapper[4840]: W1205 15:29:56.571605 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30a7c1b0_8c3f_48e7_be82_bc57e708cd5e.slice/crio-0468de1a62430cbe99d8d004e1b8824e63512e4d2917ef5e03f2815a0c7061ca WatchSource:0}: Error finding container 0468de1a62430cbe99d8d004e1b8824e63512e4d2917ef5e03f2815a0c7061ca: Status 404 returned error can't find the container with id 0468de1a62430cbe99d8d004e1b8824e63512e4d2917ef5e03f2815a0c7061ca Dec 05 15:29:56 crc kubenswrapper[4840]: I1205 15:29:56.648642 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t788v" event={"ID":"30a7c1b0-8c3f-48e7-be82-bc57e708cd5e","Type":"ContainerStarted","Data":"0468de1a62430cbe99d8d004e1b8824e63512e4d2917ef5e03f2815a0c7061ca"} Dec 05 15:29:57 crc kubenswrapper[4840]: I1205 15:29:57.658667 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t788v" event={"ID":"30a7c1b0-8c3f-48e7-be82-bc57e708cd5e","Type":"ContainerStarted","Data":"60c73de0c41fe01eee810d9d20428f47ba6d51ed2c8e5d27f5c104f0f8436aa6"} Dec 05 15:30:00 crc kubenswrapper[4840]: I1205 15:30:00.131250 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t788v" podStartSLOduration=4.424954748 podStartE2EDuration="5.131224408s" podCreationTimestamp="2025-12-05 15:29:55 +0000 UTC" firstStartedPulling="2025-12-05 15:29:56.574924575 +0000 UTC m=+1874.915987229" lastFinishedPulling="2025-12-05 15:29:57.281194275 +0000 UTC m=+1875.622256889" observedRunningTime="2025-12-05 15:29:57.674726758 +0000 UTC 
m=+1876.015789372" watchObservedRunningTime="2025-12-05 15:30:00.131224408 +0000 UTC m=+1878.472287022" Dec 05 15:30:00 crc kubenswrapper[4840]: I1205 15:30:00.138421 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415810-2cskg"] Dec 05 15:30:00 crc kubenswrapper[4840]: I1205 15:30:00.140578 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415810-2cskg" Dec 05 15:30:00 crc kubenswrapper[4840]: I1205 15:30:00.143275 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 15:30:00 crc kubenswrapper[4840]: I1205 15:30:00.143793 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 15:30:00 crc kubenswrapper[4840]: I1205 15:30:00.148673 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415810-2cskg"] Dec 05 15:30:00 crc kubenswrapper[4840]: I1205 15:30:00.251450 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/34b64b90-dd33-474c-a9b8-5a1d8c496cea-config-volume\") pod \"collect-profiles-29415810-2cskg\" (UID: \"34b64b90-dd33-474c-a9b8-5a1d8c496cea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415810-2cskg" Dec 05 15:30:00 crc kubenswrapper[4840]: I1205 15:30:00.251795 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/34b64b90-dd33-474c-a9b8-5a1d8c496cea-secret-volume\") pod \"collect-profiles-29415810-2cskg\" (UID: \"34b64b90-dd33-474c-a9b8-5a1d8c496cea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415810-2cskg" Dec 05 15:30:00 crc kubenswrapper[4840]: I1205 15:30:00.251933 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbk4c\" (UniqueName: \"kubernetes.io/projected/34b64b90-dd33-474c-a9b8-5a1d8c496cea-kube-api-access-rbk4c\") pod \"collect-profiles-29415810-2cskg\" (UID: \"34b64b90-dd33-474c-a9b8-5a1d8c496cea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415810-2cskg" Dec 05 15:30:00 crc kubenswrapper[4840]: I1205 15:30:00.363906 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/34b64b90-dd33-474c-a9b8-5a1d8c496cea-secret-volume\") pod \"collect-profiles-29415810-2cskg\" (UID: \"34b64b90-dd33-474c-a9b8-5a1d8c496cea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415810-2cskg" Dec 05 15:30:00 crc kubenswrapper[4840]: I1205 15:30:00.364221 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbk4c\" (UniqueName: \"kubernetes.io/projected/34b64b90-dd33-474c-a9b8-5a1d8c496cea-kube-api-access-rbk4c\") pod \"collect-profiles-29415810-2cskg\" (UID: \"34b64b90-dd33-474c-a9b8-5a1d8c496cea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415810-2cskg" Dec 05 15:30:00 crc kubenswrapper[4840]: I1205 15:30:00.365352 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/34b64b90-dd33-474c-a9b8-5a1d8c496cea-config-volume\") pod 
\"collect-profiles-29415810-2cskg\" (UID: \"34b64b90-dd33-474c-a9b8-5a1d8c496cea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415810-2cskg" Dec 05 15:30:00 crc kubenswrapper[4840]: I1205 15:30:00.368206 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/34b64b90-dd33-474c-a9b8-5a1d8c496cea-config-volume\") pod \"collect-profiles-29415810-2cskg\" (UID: \"34b64b90-dd33-474c-a9b8-5a1d8c496cea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415810-2cskg" Dec 05 15:30:00 crc kubenswrapper[4840]: I1205 15:30:00.374856 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/34b64b90-dd33-474c-a9b8-5a1d8c496cea-secret-volume\") pod \"collect-profiles-29415810-2cskg\" (UID: \"34b64b90-dd33-474c-a9b8-5a1d8c496cea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415810-2cskg" Dec 05 15:30:00 crc kubenswrapper[4840]: I1205 15:30:00.391528 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbk4c\" (UniqueName: \"kubernetes.io/projected/34b64b90-dd33-474c-a9b8-5a1d8c496cea-kube-api-access-rbk4c\") pod \"collect-profiles-29415810-2cskg\" (UID: \"34b64b90-dd33-474c-a9b8-5a1d8c496cea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415810-2cskg" Dec 05 15:30:00 crc kubenswrapper[4840]: I1205 15:30:00.527625 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415810-2cskg" Dec 05 15:30:01 crc kubenswrapper[4840]: I1205 15:30:01.089681 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415810-2cskg"] Dec 05 15:30:01 crc kubenswrapper[4840]: W1205 15:30:01.099056 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod34b64b90_dd33_474c_a9b8_5a1d8c496cea.slice/crio-2bbace83dd9ada64774b69b819761cbefe2f78351126e3955fd2e54024fef3cc WatchSource:0}: Error finding container 2bbace83dd9ada64774b69b819761cbefe2f78351126e3955fd2e54024fef3cc: Status 404 returned error can't find the container with id 2bbace83dd9ada64774b69b819761cbefe2f78351126e3955fd2e54024fef3cc Dec 05 15:30:01 crc kubenswrapper[4840]: I1205 15:30:01.697406 4840 generic.go:334] "Generic (PLEG): container finished" podID="34b64b90-dd33-474c-a9b8-5a1d8c496cea" containerID="3c005d6afcaf0066fa359b68823abe6b3713a1297c4cdeac932513d4ba65aca8" exitCode=0 Dec 05 15:30:01 crc kubenswrapper[4840]: I1205 15:30:01.697487 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415810-2cskg" event={"ID":"34b64b90-dd33-474c-a9b8-5a1d8c496cea","Type":"ContainerDied","Data":"3c005d6afcaf0066fa359b68823abe6b3713a1297c4cdeac932513d4ba65aca8"} Dec 05 15:30:01 crc kubenswrapper[4840]: I1205 15:30:01.697796 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415810-2cskg" event={"ID":"34b64b90-dd33-474c-a9b8-5a1d8c496cea","Type":"ContainerStarted","Data":"2bbace83dd9ada64774b69b819761cbefe2f78351126e3955fd2e54024fef3cc"} Dec 05 15:30:03 crc kubenswrapper[4840]: I1205 15:30:03.041382 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415810-2cskg" Dec 05 15:30:03 crc kubenswrapper[4840]: I1205 15:30:03.044474 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jhpgl"] Dec 05 15:30:03 crc kubenswrapper[4840]: I1205 15:30:03.053373 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-jhpgl"] Dec 05 15:30:03 crc kubenswrapper[4840]: I1205 15:30:03.067362 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:30:03 crc kubenswrapper[4840]: I1205 15:30:03.297385 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/34b64b90-dd33-474c-a9b8-5a1d8c496cea-config-volume\") pod \"34b64b90-dd33-474c-a9b8-5a1d8c496cea\" (UID: \"34b64b90-dd33-474c-a9b8-5a1d8c496cea\") " Dec 05 15:30:03 crc kubenswrapper[4840]: I1205 15:30:03.297770 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/34b64b90-dd33-474c-a9b8-5a1d8c496cea-secret-volume\") pod \"34b64b90-dd33-474c-a9b8-5a1d8c496cea\" (UID: \"34b64b90-dd33-474c-a9b8-5a1d8c496cea\") " Dec 05 15:30:03 crc kubenswrapper[4840]: I1205 15:30:03.297993 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbk4c\" (UniqueName: \"kubernetes.io/projected/34b64b90-dd33-474c-a9b8-5a1d8c496cea-kube-api-access-rbk4c\") pod \"34b64b90-dd33-474c-a9b8-5a1d8c496cea\" (UID: \"34b64b90-dd33-474c-a9b8-5a1d8c496cea\") " Dec 05 15:30:03 crc kubenswrapper[4840]: I1205 15:30:03.298090 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34b64b90-dd33-474c-a9b8-5a1d8c496cea-config-volume" (OuterVolumeSpecName: "config-volume") pod "34b64b90-dd33-474c-a9b8-5a1d8c496cea" (UID: "34b64b90-dd33-474c-a9b8-5a1d8c496cea"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:30:03 crc kubenswrapper[4840]: I1205 15:30:03.298639 4840 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/34b64b90-dd33-474c-a9b8-5a1d8c496cea-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 15:30:03 crc kubenswrapper[4840]: I1205 15:30:03.307015 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34b64b90-dd33-474c-a9b8-5a1d8c496cea-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "34b64b90-dd33-474c-a9b8-5a1d8c496cea" (UID: "34b64b90-dd33-474c-a9b8-5a1d8c496cea"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:30:03 crc kubenswrapper[4840]: I1205 15:30:03.309031 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34b64b90-dd33-474c-a9b8-5a1d8c496cea-kube-api-access-rbk4c" (OuterVolumeSpecName: "kube-api-access-rbk4c") pod "34b64b90-dd33-474c-a9b8-5a1d8c496cea" (UID: "34b64b90-dd33-474c-a9b8-5a1d8c496cea"). InnerVolumeSpecName "kube-api-access-rbk4c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:30:03 crc kubenswrapper[4840]: I1205 15:30:03.400572 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbk4c\" (UniqueName: \"kubernetes.io/projected/34b64b90-dd33-474c-a9b8-5a1d8c496cea-kube-api-access-rbk4c\") on node \"crc\" DevicePath \"\"" Dec 05 15:30:03 crc kubenswrapper[4840]: I1205 15:30:03.400637 4840 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/34b64b90-dd33-474c-a9b8-5a1d8c496cea-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 15:30:03 crc kubenswrapper[4840]: I1205 15:30:03.716035 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415810-2cskg" event={"ID":"34b64b90-dd33-474c-a9b8-5a1d8c496cea","Type":"ContainerDied","Data":"2bbace83dd9ada64774b69b819761cbefe2f78351126e3955fd2e54024fef3cc"} Dec 05 15:30:03 crc kubenswrapper[4840]: I1205 15:30:03.716381 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2bbace83dd9ada64774b69b819761cbefe2f78351126e3955fd2e54024fef3cc" Dec 05 15:30:03 crc kubenswrapper[4840]: I1205 15:30:03.716057 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415810-2cskg" Dec 05 15:30:03 crc kubenswrapper[4840]: I1205 15:30:03.726931 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerStarted","Data":"4326a1eba95fff009410270bad695965f3fad46dfb07fed1bafd6c12a2802aa3"} Dec 05 15:30:04 crc kubenswrapper[4840]: I1205 15:30:04.077761 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd12815f-fbf2-4030-847f-d843ac9f5343" path="/var/lib/kubelet/pods/fd12815f-fbf2-4030-847f-d843ac9f5343/volumes" Dec 05 15:30:18 crc kubenswrapper[4840]: I1205 15:30:18.075661 4840 scope.go:117] "RemoveContainer" containerID="33d80cef45fe10e356f62de1e37db8ebc8fc9842eaeaa667280c7df351bd4787" Dec 05 15:30:18 crc kubenswrapper[4840]: I1205 15:30:18.108014 4840 scope.go:117] "RemoveContainer" containerID="25a7b4eb05f2ded88ad31c3bea9c2da18a7d3f6c2966a7aca75748f67e0440bf" Dec 05 15:30:18 crc kubenswrapper[4840]: I1205 15:30:18.161246 4840 scope.go:117] "RemoveContainer" containerID="028c7689d356b5f46f9b8d1571b365c89b6e769ef46ba15e3f792e7163c005ff" Dec 05 15:30:18 crc kubenswrapper[4840]: I1205 15:30:18.201808 4840 scope.go:117] "RemoveContainer" containerID="acd03ef676da987ff3d92787514e19ff67950047796874abb4684f85420b1603" Dec 05 15:30:18 crc kubenswrapper[4840]: I1205 15:30:18.243993 4840 scope.go:117] "RemoveContainer" containerID="6b12bc79556bdc8de3058cc82a9e6df2f5c7ca0e31fa1eeccd6f92664993c2dd" Dec 05 15:30:18 crc kubenswrapper[4840]: I1205 15:30:18.318367 4840 scope.go:117] "RemoveContainer" containerID="f8ec05afa270f81e987ca39d111711c944f93906790227bfb7847599ae2adb0d" Dec 05 15:30:18 crc kubenswrapper[4840]: I1205 15:30:18.344092 4840 scope.go:117] "RemoveContainer" containerID="ff4cd977d2839608225ad43df4395d5880a4d6a4dcce5f35f885b3a0dffe65d9" Dec 05 15:30:25 crc kubenswrapper[4840]: I1205 15:30:25.045415 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bmprz"] Dec 05 15:30:25 crc kubenswrapper[4840]: I1205 15:30:25.054569 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/nova-cell1-conductor-db-sync-bmprz"] Dec 05 15:30:26 crc kubenswrapper[4840]: I1205 15:30:26.039012 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-mbqt2"] Dec 05 15:30:26 crc kubenswrapper[4840]: I1205 15:30:26.049028 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-mbqt2"] Dec 05 15:30:26 crc kubenswrapper[4840]: I1205 15:30:26.077670 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1eb8edca-e2c3-414d-99d6-eb12987292da" path="/var/lib/kubelet/pods/1eb8edca-e2c3-414d-99d6-eb12987292da/volumes" Dec 05 15:30:26 crc kubenswrapper[4840]: I1205 15:30:26.078480 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f56cb32-aedc-48db-bc49-99c6d668ce1b" path="/var/lib/kubelet/pods/8f56cb32-aedc-48db-bc49-99c6d668ce1b/volumes" Dec 05 15:30:37 crc kubenswrapper[4840]: I1205 15:30:37.020129 4840 generic.go:334] "Generic (PLEG): container finished" podID="30a7c1b0-8c3f-48e7-be82-bc57e708cd5e" containerID="60c73de0c41fe01eee810d9d20428f47ba6d51ed2c8e5d27f5c104f0f8436aa6" exitCode=0 Dec 05 15:30:37 crc kubenswrapper[4840]: I1205 15:30:37.020214 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t788v" event={"ID":"30a7c1b0-8c3f-48e7-be82-bc57e708cd5e","Type":"ContainerDied","Data":"60c73de0c41fe01eee810d9d20428f47ba6d51ed2c8e5d27f5c104f0f8436aa6"} Dec 05 15:30:38 crc kubenswrapper[4840]: I1205 15:30:38.473939 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t788v" Dec 05 15:30:38 crc kubenswrapper[4840]: I1205 15:30:38.631238 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6bhsr\" (UniqueName: \"kubernetes.io/projected/30a7c1b0-8c3f-48e7-be82-bc57e708cd5e-kube-api-access-6bhsr\") pod \"30a7c1b0-8c3f-48e7-be82-bc57e708cd5e\" (UID: \"30a7c1b0-8c3f-48e7-be82-bc57e708cd5e\") " Dec 05 15:30:38 crc kubenswrapper[4840]: I1205 15:30:38.631376 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/30a7c1b0-8c3f-48e7-be82-bc57e708cd5e-inventory\") pod \"30a7c1b0-8c3f-48e7-be82-bc57e708cd5e\" (UID: \"30a7c1b0-8c3f-48e7-be82-bc57e708cd5e\") " Dec 05 15:30:38 crc kubenswrapper[4840]: I1205 15:30:38.631463 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/30a7c1b0-8c3f-48e7-be82-bc57e708cd5e-ssh-key\") pod \"30a7c1b0-8c3f-48e7-be82-bc57e708cd5e\" (UID: \"30a7c1b0-8c3f-48e7-be82-bc57e708cd5e\") " Dec 05 15:30:38 crc kubenswrapper[4840]: I1205 15:30:38.636751 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30a7c1b0-8c3f-48e7-be82-bc57e708cd5e-kube-api-access-6bhsr" (OuterVolumeSpecName: "kube-api-access-6bhsr") pod "30a7c1b0-8c3f-48e7-be82-bc57e708cd5e" (UID: "30a7c1b0-8c3f-48e7-be82-bc57e708cd5e"). InnerVolumeSpecName "kube-api-access-6bhsr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:30:38 crc kubenswrapper[4840]: I1205 15:30:38.665051 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30a7c1b0-8c3f-48e7-be82-bc57e708cd5e-inventory" (OuterVolumeSpecName: "inventory") pod "30a7c1b0-8c3f-48e7-be82-bc57e708cd5e" (UID: "30a7c1b0-8c3f-48e7-be82-bc57e708cd5e"). 
InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:30:38 crc kubenswrapper[4840]: I1205 15:30:38.685216 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30a7c1b0-8c3f-48e7-be82-bc57e708cd5e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "30a7c1b0-8c3f-48e7-be82-bc57e708cd5e" (UID: "30a7c1b0-8c3f-48e7-be82-bc57e708cd5e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:30:38 crc kubenswrapper[4840]: I1205 15:30:38.734570 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6bhsr\" (UniqueName: \"kubernetes.io/projected/30a7c1b0-8c3f-48e7-be82-bc57e708cd5e-kube-api-access-6bhsr\") on node \"crc\" DevicePath \"\"" Dec 05 15:30:38 crc kubenswrapper[4840]: I1205 15:30:38.734607 4840 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/30a7c1b0-8c3f-48e7-be82-bc57e708cd5e-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 15:30:38 crc kubenswrapper[4840]: I1205 15:30:38.734625 4840 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/30a7c1b0-8c3f-48e7-be82-bc57e708cd5e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 15:30:39 crc kubenswrapper[4840]: I1205 15:30:39.039176 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t788v" event={"ID":"30a7c1b0-8c3f-48e7-be82-bc57e708cd5e","Type":"ContainerDied","Data":"0468de1a62430cbe99d8d004e1b8824e63512e4d2917ef5e03f2815a0c7061ca"} Dec 05 15:30:39 crc kubenswrapper[4840]: I1205 15:30:39.039216 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0468de1a62430cbe99d8d004e1b8824e63512e4d2917ef5e03f2815a0c7061ca" Dec 05 15:30:39 crc kubenswrapper[4840]: I1205 15:30:39.039227 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t788v" Dec 05 15:30:39 crc kubenswrapper[4840]: I1205 15:30:39.149120 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx"] Dec 05 15:30:39 crc kubenswrapper[4840]: E1205 15:30:39.149534 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30a7c1b0-8c3f-48e7-be82-bc57e708cd5e" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 15:30:39 crc kubenswrapper[4840]: I1205 15:30:39.149551 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="30a7c1b0-8c3f-48e7-be82-bc57e708cd5e" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 15:30:39 crc kubenswrapper[4840]: E1205 15:30:39.149570 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34b64b90-dd33-474c-a9b8-5a1d8c496cea" containerName="collect-profiles" Dec 05 15:30:39 crc kubenswrapper[4840]: I1205 15:30:39.149576 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="34b64b90-dd33-474c-a9b8-5a1d8c496cea" containerName="collect-profiles" Dec 05 15:30:39 crc kubenswrapper[4840]: I1205 15:30:39.149767 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="34b64b90-dd33-474c-a9b8-5a1d8c496cea" containerName="collect-profiles" Dec 05 15:30:39 crc kubenswrapper[4840]: I1205 15:30:39.149787 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="30a7c1b0-8c3f-48e7-be82-bc57e708cd5e" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 15:30:39 crc kubenswrapper[4840]: I1205 15:30:39.150432 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx" Dec 05 15:30:39 crc kubenswrapper[4840]: I1205 15:30:39.153011 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 15:30:39 crc kubenswrapper[4840]: I1205 15:30:39.153404 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 15:30:39 crc kubenswrapper[4840]: I1205 15:30:39.153497 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 15:30:39 crc kubenswrapper[4840]: I1205 15:30:39.153701 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-6c9x2" Dec 05 15:30:39 crc kubenswrapper[4840]: I1205 15:30:39.159672 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx"] Dec 05 15:30:39 crc kubenswrapper[4840]: I1205 15:30:39.243232 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23c1423f-c01b-4a22-b2de-63e6a8646eed-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx\" (UID: \"23c1423f-c01b-4a22-b2de-63e6a8646eed\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx" Dec 05 15:30:39 crc kubenswrapper[4840]: I1205 15:30:39.243296 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23c1423f-c01b-4a22-b2de-63e6a8646eed-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx\" (UID: \"23c1423f-c01b-4a22-b2de-63e6a8646eed\") " 
pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx" Dec 05 15:30:39 crc kubenswrapper[4840]: I1205 15:30:39.243441 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9rjr\" (UniqueName: \"kubernetes.io/projected/23c1423f-c01b-4a22-b2de-63e6a8646eed-kube-api-access-z9rjr\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx\" (UID: \"23c1423f-c01b-4a22-b2de-63e6a8646eed\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx" Dec 05 15:30:39 crc kubenswrapper[4840]: I1205 15:30:39.344819 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23c1423f-c01b-4a22-b2de-63e6a8646eed-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx\" (UID: \"23c1423f-c01b-4a22-b2de-63e6a8646eed\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx" Dec 05 15:30:39 crc kubenswrapper[4840]: I1205 15:30:39.344904 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23c1423f-c01b-4a22-b2de-63e6a8646eed-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx\" (UID: \"23c1423f-c01b-4a22-b2de-63e6a8646eed\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx" Dec 05 15:30:39 crc kubenswrapper[4840]: I1205 15:30:39.345015 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9rjr\" (UniqueName: \"kubernetes.io/projected/23c1423f-c01b-4a22-b2de-63e6a8646eed-kube-api-access-z9rjr\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx\" (UID: \"23c1423f-c01b-4a22-b2de-63e6a8646eed\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx" Dec 05 15:30:39 crc kubenswrapper[4840]: I1205 15:30:39.349123 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23c1423f-c01b-4a22-b2de-63e6a8646eed-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx\" (UID: \"23c1423f-c01b-4a22-b2de-63e6a8646eed\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx" Dec 05 15:30:39 crc kubenswrapper[4840]: I1205 15:30:39.349142 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23c1423f-c01b-4a22-b2de-63e6a8646eed-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx\" (UID: \"23c1423f-c01b-4a22-b2de-63e6a8646eed\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx" Dec 05 15:30:39 crc kubenswrapper[4840]: I1205 15:30:39.365624 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9rjr\" (UniqueName: \"kubernetes.io/projected/23c1423f-c01b-4a22-b2de-63e6a8646eed-kube-api-access-z9rjr\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx\" (UID: \"23c1423f-c01b-4a22-b2de-63e6a8646eed\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx" Dec 05 15:30:39 crc kubenswrapper[4840]: I1205 15:30:39.468146 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx" Dec 05 15:30:40 crc kubenswrapper[4840]: I1205 15:30:40.054936 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx"] Dec 05 15:30:40 crc kubenswrapper[4840]: I1205 15:30:40.055679 4840 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 15:30:41 crc kubenswrapper[4840]: I1205 15:30:41.056312 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx" event={"ID":"23c1423f-c01b-4a22-b2de-63e6a8646eed","Type":"ContainerStarted","Data":"a819dfd85124e16f4c8fece2def3c8936b2fe3f2c323ecbf0d2ac349bda4be34"} Dec 05 15:30:41 crc kubenswrapper[4840]: I1205 15:30:41.056684 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx" event={"ID":"23c1423f-c01b-4a22-b2de-63e6a8646eed","Type":"ContainerStarted","Data":"88844b053afd5f3215da360f9e0a6f33cc2ae3381249262f8b52fb90dad01f34"} Dec 05 15:30:41 crc kubenswrapper[4840]: I1205 15:30:41.069206 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx" podStartSLOduration=1.579244001 podStartE2EDuration="2.069188045s" podCreationTimestamp="2025-12-05 15:30:39 +0000 UTC" firstStartedPulling="2025-12-05 15:30:40.05548815 +0000 UTC m=+1918.396550754" lastFinishedPulling="2025-12-05 15:30:40.545432184 +0000 UTC m=+1918.886494798" observedRunningTime="2025-12-05 15:30:41.069079712 +0000 UTC m=+1919.410142336" watchObservedRunningTime="2025-12-05 15:30:41.069188045 +0000 UTC m=+1919.410250659" Dec 05 15:31:10 crc kubenswrapper[4840]: I1205 15:31:10.041549 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-49rjk"] Dec 05 15:31:10 crc kubenswrapper[4840]: I1205 15:31:10.050633 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-49rjk"] Dec 05 15:31:10 crc kubenswrapper[4840]: I1205 15:31:10.076726 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe6183d0-57eb-47e8-8fe3-15cacaedfde3" path="/var/lib/kubelet/pods/fe6183d0-57eb-47e8-8fe3-15cacaedfde3/volumes" Dec 05 15:31:18 crc kubenswrapper[4840]: I1205 15:31:18.480404 4840 scope.go:117] "RemoveContainer" containerID="c918174995a3cd3f297ad0fd560d80a86376530b1c9c16d462e1bc0583c2d611" Dec 05 15:31:18 crc kubenswrapper[4840]: I1205 15:31:18.605309 4840 scope.go:117] "RemoveContainer" containerID="ef726b6aea293f80b3eda56e0a7d32b0a78fa9d15a000bc82bc76f2e62bc2bc5" Dec 05 15:31:18 crc kubenswrapper[4840]: I1205 15:31:18.645824 4840 scope.go:117] "RemoveContainer" containerID="bfe6a586db863e9a48d639405a417c4e1ca08c0214c92fe8d4150b6d3da3a689" Dec 05 15:31:31 crc kubenswrapper[4840]: I1205 15:31:31.586032 4840 generic.go:334] "Generic (PLEG): container finished" podID="23c1423f-c01b-4a22-b2de-63e6a8646eed" containerID="a819dfd85124e16f4c8fece2def3c8936b2fe3f2c323ecbf0d2ac349bda4be34" exitCode=0 Dec 05 15:31:31 crc kubenswrapper[4840]: I1205 15:31:31.586126 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx" event={"ID":"23c1423f-c01b-4a22-b2de-63e6a8646eed","Type":"ContainerDied","Data":"a819dfd85124e16f4c8fece2def3c8936b2fe3f2c323ecbf0d2ac349bda4be34"} Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 
15:31:33.329805 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.412613 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23c1423f-c01b-4a22-b2de-63e6a8646eed-inventory\") pod \"23c1423f-c01b-4a22-b2de-63e6a8646eed\" (UID: \"23c1423f-c01b-4a22-b2de-63e6a8646eed\") " Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.412683 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9rjr\" (UniqueName: \"kubernetes.io/projected/23c1423f-c01b-4a22-b2de-63e6a8646eed-kube-api-access-z9rjr\") pod \"23c1423f-c01b-4a22-b2de-63e6a8646eed\" (UID: \"23c1423f-c01b-4a22-b2de-63e6a8646eed\") " Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.412750 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23c1423f-c01b-4a22-b2de-63e6a8646eed-ssh-key\") pod \"23c1423f-c01b-4a22-b2de-63e6a8646eed\" (UID: \"23c1423f-c01b-4a22-b2de-63e6a8646eed\") " Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.418188 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23c1423f-c01b-4a22-b2de-63e6a8646eed-kube-api-access-z9rjr" (OuterVolumeSpecName: "kube-api-access-z9rjr") pod "23c1423f-c01b-4a22-b2de-63e6a8646eed" (UID: "23c1423f-c01b-4a22-b2de-63e6a8646eed"). InnerVolumeSpecName "kube-api-access-z9rjr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.514528 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9rjr\" (UniqueName: \"kubernetes.io/projected/23c1423f-c01b-4a22-b2de-63e6a8646eed-kube-api-access-z9rjr\") on node \"crc\" DevicePath \"\"" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.579376 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23c1423f-c01b-4a22-b2de-63e6a8646eed-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "23c1423f-c01b-4a22-b2de-63e6a8646eed" (UID: "23c1423f-c01b-4a22-b2de-63e6a8646eed"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.580778 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23c1423f-c01b-4a22-b2de-63e6a8646eed-inventory" (OuterVolumeSpecName: "inventory") pod "23c1423f-c01b-4a22-b2de-63e6a8646eed" (UID: "23c1423f-c01b-4a22-b2de-63e6a8646eed"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.607984 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx" event={"ID":"23c1423f-c01b-4a22-b2de-63e6a8646eed","Type":"ContainerDied","Data":"88844b053afd5f3215da360f9e0a6f33cc2ae3381249262f8b52fb90dad01f34"} Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.608021 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88844b053afd5f3215da360f9e0a6f33cc2ae3381249262f8b52fb90dad01f34" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.608085 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.616796 4840 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/23c1423f-c01b-4a22-b2de-63e6a8646eed-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.617008 4840 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/23c1423f-c01b-4a22-b2de-63e6a8646eed-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.702606 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-8sfl9"] Dec 05 15:31:33 crc kubenswrapper[4840]: E1205 15:31:33.702998 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23c1423f-c01b-4a22-b2de-63e6a8646eed" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.703017 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="23c1423f-c01b-4a22-b2de-63e6a8646eed" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.703245 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="23c1423f-c01b-4a22-b2de-63e6a8646eed" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.703947 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-8sfl9" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.707902 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-6c9x2" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.708211 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.708365 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.708375 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.719458 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55ndn\" (UniqueName: \"kubernetes.io/projected/0e16306a-b5d0-468b-b0a2-b19ad5af4592-kube-api-access-55ndn\") pod \"ssh-known-hosts-edpm-deployment-8sfl9\" (UID: \"0e16306a-b5d0-468b-b0a2-b19ad5af4592\") " pod="openstack/ssh-known-hosts-edpm-deployment-8sfl9" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.719540 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0e16306a-b5d0-468b-b0a2-b19ad5af4592-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-8sfl9\" (UID: \"0e16306a-b5d0-468b-b0a2-b19ad5af4592\") " pod="openstack/ssh-known-hosts-edpm-deployment-8sfl9" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.719740 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0e16306a-b5d0-468b-b0a2-b19ad5af4592-ssh-key-openstack-edpm-ipam\") pod 
\"ssh-known-hosts-edpm-deployment-8sfl9\" (UID: \"0e16306a-b5d0-468b-b0a2-b19ad5af4592\") " pod="openstack/ssh-known-hosts-edpm-deployment-8sfl9" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.727667 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-8sfl9"] Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.821190 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0e16306a-b5d0-468b-b0a2-b19ad5af4592-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-8sfl9\" (UID: \"0e16306a-b5d0-468b-b0a2-b19ad5af4592\") " pod="openstack/ssh-known-hosts-edpm-deployment-8sfl9" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.821343 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55ndn\" (UniqueName: \"kubernetes.io/projected/0e16306a-b5d0-468b-b0a2-b19ad5af4592-kube-api-access-55ndn\") pod \"ssh-known-hosts-edpm-deployment-8sfl9\" (UID: \"0e16306a-b5d0-468b-b0a2-b19ad5af4592\") " pod="openstack/ssh-known-hosts-edpm-deployment-8sfl9" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.821403 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0e16306a-b5d0-468b-b0a2-b19ad5af4592-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-8sfl9\" (UID: \"0e16306a-b5d0-468b-b0a2-b19ad5af4592\") " pod="openstack/ssh-known-hosts-edpm-deployment-8sfl9" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.824938 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0e16306a-b5d0-468b-b0a2-b19ad5af4592-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-8sfl9\" (UID: \"0e16306a-b5d0-468b-b0a2-b19ad5af4592\") " pod="openstack/ssh-known-hosts-edpm-deployment-8sfl9" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.824943 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0e16306a-b5d0-468b-b0a2-b19ad5af4592-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-8sfl9\" (UID: \"0e16306a-b5d0-468b-b0a2-b19ad5af4592\") " pod="openstack/ssh-known-hosts-edpm-deployment-8sfl9" Dec 05 15:31:33 crc kubenswrapper[4840]: I1205 15:31:33.840325 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55ndn\" (UniqueName: \"kubernetes.io/projected/0e16306a-b5d0-468b-b0a2-b19ad5af4592-kube-api-access-55ndn\") pod \"ssh-known-hosts-edpm-deployment-8sfl9\" (UID: \"0e16306a-b5d0-468b-b0a2-b19ad5af4592\") " pod="openstack/ssh-known-hosts-edpm-deployment-8sfl9" Dec 05 15:31:34 crc kubenswrapper[4840]: I1205 15:31:34.039295 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-8sfl9" Dec 05 15:31:34 crc kubenswrapper[4840]: I1205 15:31:34.563615 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-8sfl9"] Dec 05 15:31:34 crc kubenswrapper[4840]: I1205 15:31:34.618084 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-8sfl9" event={"ID":"0e16306a-b5d0-468b-b0a2-b19ad5af4592","Type":"ContainerStarted","Data":"ebe3028757aba10901459e4455910a77a27adde4b355bb92466e0c83655a8e7a"} Dec 05 15:31:35 crc kubenswrapper[4840]: I1205 15:31:35.630245 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-8sfl9" event={"ID":"0e16306a-b5d0-468b-b0a2-b19ad5af4592","Type":"ContainerStarted","Data":"891d4f7e4e35b98e0a33dedbb3e30e01b6a4c39ceaaea9f53cefc102c1bd916e"} Dec 05 15:31:35 crc kubenswrapper[4840]: I1205 15:31:35.647507 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-8sfl9" podStartSLOduration=1.978395598 podStartE2EDuration="2.647479524s" podCreationTimestamp="2025-12-05 15:31:33 +0000 UTC" firstStartedPulling="2025-12-05 15:31:34.571795374 +0000 UTC m=+1972.912857978" lastFinishedPulling="2025-12-05 15:31:35.24087929 +0000 UTC m=+1973.581941904" observedRunningTime="2025-12-05 15:31:35.645996712 +0000 UTC m=+1973.987059346" watchObservedRunningTime="2025-12-05 15:31:35.647479524 +0000 UTC m=+1973.988542148" Dec 05 15:31:43 crc kubenswrapper[4840]: I1205 15:31:43.742049 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-8sfl9" event={"ID":"0e16306a-b5d0-468b-b0a2-b19ad5af4592","Type":"ContainerDied","Data":"891d4f7e4e35b98e0a33dedbb3e30e01b6a4c39ceaaea9f53cefc102c1bd916e"} Dec 05 15:31:43 crc kubenswrapper[4840]: I1205 15:31:43.742010 4840 generic.go:334] "Generic (PLEG): container finished" podID="0e16306a-b5d0-468b-b0a2-b19ad5af4592" containerID="891d4f7e4e35b98e0a33dedbb3e30e01b6a4c39ceaaea9f53cefc102c1bd916e" exitCode=0 Dec 05 15:31:45 crc kubenswrapper[4840]: I1205 15:31:45.548603 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-8sfl9" Dec 05 15:31:45 crc kubenswrapper[4840]: I1205 15:31:45.701068 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0e16306a-b5d0-468b-b0a2-b19ad5af4592-inventory-0\") pod \"0e16306a-b5d0-468b-b0a2-b19ad5af4592\" (UID: \"0e16306a-b5d0-468b-b0a2-b19ad5af4592\") " Dec 05 15:31:45 crc kubenswrapper[4840]: I1205 15:31:45.701381 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0e16306a-b5d0-468b-b0a2-b19ad5af4592-ssh-key-openstack-edpm-ipam\") pod \"0e16306a-b5d0-468b-b0a2-b19ad5af4592\" (UID: \"0e16306a-b5d0-468b-b0a2-b19ad5af4592\") " Dec 05 15:31:45 crc kubenswrapper[4840]: I1205 15:31:45.701452 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55ndn\" (UniqueName: \"kubernetes.io/projected/0e16306a-b5d0-468b-b0a2-b19ad5af4592-kube-api-access-55ndn\") pod \"0e16306a-b5d0-468b-b0a2-b19ad5af4592\" (UID: \"0e16306a-b5d0-468b-b0a2-b19ad5af4592\") " Dec 05 15:31:45 crc kubenswrapper[4840]: I1205 15:31:45.709746 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e16306a-b5d0-468b-b0a2-b19ad5af4592-kube-api-access-55ndn" (OuterVolumeSpecName: "kube-api-access-55ndn") pod "0e16306a-b5d0-468b-b0a2-b19ad5af4592" (UID: "0e16306a-b5d0-468b-b0a2-b19ad5af4592"). InnerVolumeSpecName "kube-api-access-55ndn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:31:45 crc kubenswrapper[4840]: I1205 15:31:45.730688 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e16306a-b5d0-468b-b0a2-b19ad5af4592-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "0e16306a-b5d0-468b-b0a2-b19ad5af4592" (UID: "0e16306a-b5d0-468b-b0a2-b19ad5af4592"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:31:45 crc kubenswrapper[4840]: I1205 15:31:45.732046 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e16306a-b5d0-468b-b0a2-b19ad5af4592-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "0e16306a-b5d0-468b-b0a2-b19ad5af4592" (UID: "0e16306a-b5d0-468b-b0a2-b19ad5af4592"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:31:45 crc kubenswrapper[4840]: I1205 15:31:45.764436 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-8sfl9" event={"ID":"0e16306a-b5d0-468b-b0a2-b19ad5af4592","Type":"ContainerDied","Data":"ebe3028757aba10901459e4455910a77a27adde4b355bb92466e0c83655a8e7a"} Dec 05 15:31:45 crc kubenswrapper[4840]: I1205 15:31:45.764472 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ebe3028757aba10901459e4455910a77a27adde4b355bb92466e0c83655a8e7a" Dec 05 15:31:45 crc kubenswrapper[4840]: I1205 15:31:45.764474 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-8sfl9" Dec 05 15:31:45 crc kubenswrapper[4840]: I1205 15:31:45.806375 4840 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0e16306a-b5d0-468b-b0a2-b19ad5af4592-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 05 15:31:45 crc kubenswrapper[4840]: I1205 15:31:45.806436 4840 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0e16306a-b5d0-468b-b0a2-b19ad5af4592-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 05 15:31:45 crc kubenswrapper[4840]: I1205 15:31:45.806450 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55ndn\" (UniqueName: \"kubernetes.io/projected/0e16306a-b5d0-468b-b0a2-b19ad5af4592-kube-api-access-55ndn\") on node \"crc\" DevicePath \"\"" Dec 05 15:31:45 crc kubenswrapper[4840]: I1205 15:31:45.844330 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-thbrf"] Dec 05 15:31:45 crc kubenswrapper[4840]: E1205 15:31:45.844770 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e16306a-b5d0-468b-b0a2-b19ad5af4592" containerName="ssh-known-hosts-edpm-deployment" Dec 05 15:31:45 crc kubenswrapper[4840]: I1205 15:31:45.844792 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e16306a-b5d0-468b-b0a2-b19ad5af4592" containerName="ssh-known-hosts-edpm-deployment" Dec 05 15:31:45 crc kubenswrapper[4840]: I1205 15:31:45.845061 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e16306a-b5d0-468b-b0a2-b19ad5af4592" containerName="ssh-known-hosts-edpm-deployment" Dec 05 15:31:45 crc kubenswrapper[4840]: I1205 15:31:45.845762 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-thbrf" Dec 05 15:31:45 crc kubenswrapper[4840]: I1205 15:31:45.848717 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 15:31:45 crc kubenswrapper[4840]: I1205 15:31:45.848972 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 15:31:45 crc kubenswrapper[4840]: I1205 15:31:45.849261 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-6c9x2" Dec 05 15:31:45 crc kubenswrapper[4840]: I1205 15:31:45.849287 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 15:31:45 crc kubenswrapper[4840]: I1205 15:31:45.886725 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-thbrf"] Dec 05 15:31:46 crc kubenswrapper[4840]: I1205 15:31:46.009559 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/54e23b4a-dfba-45da-9197-cfbdcbd4ccfe-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-thbrf\" (UID: \"54e23b4a-dfba-45da-9197-cfbdcbd4ccfe\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-thbrf" Dec 05 15:31:46 crc kubenswrapper[4840]: I1205 15:31:46.009680 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/54e23b4a-dfba-45da-9197-cfbdcbd4ccfe-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-thbrf\" (UID: \"54e23b4a-dfba-45da-9197-cfbdcbd4ccfe\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-thbrf" Dec 05 15:31:46 crc kubenswrapper[4840]: I1205 15:31:46.009705 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9l4v\" (UniqueName: \"kubernetes.io/projected/54e23b4a-dfba-45da-9197-cfbdcbd4ccfe-kube-api-access-h9l4v\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-thbrf\" (UID: \"54e23b4a-dfba-45da-9197-cfbdcbd4ccfe\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-thbrf" Dec 05 15:31:46 crc kubenswrapper[4840]: I1205 15:31:46.110896 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/54e23b4a-dfba-45da-9197-cfbdcbd4ccfe-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-thbrf\" (UID: \"54e23b4a-dfba-45da-9197-cfbdcbd4ccfe\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-thbrf" Dec 05 15:31:46 crc kubenswrapper[4840]: I1205 15:31:46.111003 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/54e23b4a-dfba-45da-9197-cfbdcbd4ccfe-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-thbrf\" (UID: \"54e23b4a-dfba-45da-9197-cfbdcbd4ccfe\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-thbrf" Dec 05 15:31:46 crc kubenswrapper[4840]: I1205 15:31:46.111042 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9l4v\" (UniqueName: \"kubernetes.io/projected/54e23b4a-dfba-45da-9197-cfbdcbd4ccfe-kube-api-access-h9l4v\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-thbrf\" (UID: \"54e23b4a-dfba-45da-9197-cfbdcbd4ccfe\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-thbrf" Dec 05 15:31:46 crc kubenswrapper[4840]: I1205 15:31:46.116248 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/54e23b4a-dfba-45da-9197-cfbdcbd4ccfe-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-thbrf\" (UID: \"54e23b4a-dfba-45da-9197-cfbdcbd4ccfe\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-thbrf" Dec 05 15:31:46 crc kubenswrapper[4840]: I1205 15:31:46.116274 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/54e23b4a-dfba-45da-9197-cfbdcbd4ccfe-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-thbrf\" (UID: \"54e23b4a-dfba-45da-9197-cfbdcbd4ccfe\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-thbrf" Dec 05 15:31:46 crc kubenswrapper[4840]: I1205 15:31:46.127824 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9l4v\" (UniqueName: \"kubernetes.io/projected/54e23b4a-dfba-45da-9197-cfbdcbd4ccfe-kube-api-access-h9l4v\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-thbrf\" (UID: \"54e23b4a-dfba-45da-9197-cfbdcbd4ccfe\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-thbrf" Dec 05 15:31:46 crc kubenswrapper[4840]: I1205 15:31:46.183009 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-thbrf" Dec 05 15:31:47 crc kubenswrapper[4840]: I1205 15:31:47.203720 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-thbrf"] Dec 05 15:31:47 crc kubenswrapper[4840]: W1205 15:31:47.209089 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod54e23b4a_dfba_45da_9197_cfbdcbd4ccfe.slice/crio-b8fb1adc0c6ff1158c30d04636fbfdb60cd12e05114c4e65d2e735fd65754007 WatchSource:0}: Error finding container b8fb1adc0c6ff1158c30d04636fbfdb60cd12e05114c4e65d2e735fd65754007: Status 404 returned error can't find the container with id b8fb1adc0c6ff1158c30d04636fbfdb60cd12e05114c4e65d2e735fd65754007 Dec 05 15:31:47 crc kubenswrapper[4840]: I1205 15:31:47.815198 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-thbrf" event={"ID":"54e23b4a-dfba-45da-9197-cfbdcbd4ccfe","Type":"ContainerStarted","Data":"b8fb1adc0c6ff1158c30d04636fbfdb60cd12e05114c4e65d2e735fd65754007"} Dec 05 15:31:49 crc kubenswrapper[4840]: I1205 15:31:49.110531 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-thbrf" event={"ID":"54e23b4a-dfba-45da-9197-cfbdcbd4ccfe","Type":"ContainerStarted","Data":"669023ac5a36b41e23278ad6ca54abc9e6e7767a58e54e4c3398ec48b6f668b6"} Dec 05 15:31:49 crc kubenswrapper[4840]: I1205 15:31:49.133631 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-thbrf" podStartSLOduration=3.397026794 podStartE2EDuration="4.133610292s" podCreationTimestamp="2025-12-05 15:31:45 +0000 UTC" firstStartedPulling="2025-12-05 15:31:47.214752683 +0000 UTC m=+1985.555815297" lastFinishedPulling="2025-12-05 15:31:47.951336181 +0000 UTC m=+1986.292398795" observedRunningTime="2025-12-05 15:31:49.133199271 +0000 UTC m=+1987.474261885" watchObservedRunningTime="2025-12-05 15:31:49.133610292 +0000 UTC 
m=+1987.474672906" Dec 05 15:31:58 crc kubenswrapper[4840]: I1205 15:31:58.515098 4840 generic.go:334] "Generic (PLEG): container finished" podID="54e23b4a-dfba-45da-9197-cfbdcbd4ccfe" containerID="669023ac5a36b41e23278ad6ca54abc9e6e7767a58e54e4c3398ec48b6f668b6" exitCode=0 Dec 05 15:31:58 crc kubenswrapper[4840]: I1205 15:31:58.515187 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-thbrf" event={"ID":"54e23b4a-dfba-45da-9197-cfbdcbd4ccfe","Type":"ContainerDied","Data":"669023ac5a36b41e23278ad6ca54abc9e6e7767a58e54e4c3398ec48b6f668b6"} Dec 05 15:31:59 crc kubenswrapper[4840]: I1205 15:31:59.935376 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-thbrf" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.066279 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/54e23b4a-dfba-45da-9197-cfbdcbd4ccfe-ssh-key\") pod \"54e23b4a-dfba-45da-9197-cfbdcbd4ccfe\" (UID: \"54e23b4a-dfba-45da-9197-cfbdcbd4ccfe\") " Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.066393 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/54e23b4a-dfba-45da-9197-cfbdcbd4ccfe-inventory\") pod \"54e23b4a-dfba-45da-9197-cfbdcbd4ccfe\" (UID: \"54e23b4a-dfba-45da-9197-cfbdcbd4ccfe\") " Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.066543 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h9l4v\" (UniqueName: \"kubernetes.io/projected/54e23b4a-dfba-45da-9197-cfbdcbd4ccfe-kube-api-access-h9l4v\") pod \"54e23b4a-dfba-45da-9197-cfbdcbd4ccfe\" (UID: \"54e23b4a-dfba-45da-9197-cfbdcbd4ccfe\") " Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.076988 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54e23b4a-dfba-45da-9197-cfbdcbd4ccfe-kube-api-access-h9l4v" (OuterVolumeSpecName: "kube-api-access-h9l4v") pod "54e23b4a-dfba-45da-9197-cfbdcbd4ccfe" (UID: "54e23b4a-dfba-45da-9197-cfbdcbd4ccfe"). InnerVolumeSpecName "kube-api-access-h9l4v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.094611 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54e23b4a-dfba-45da-9197-cfbdcbd4ccfe-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "54e23b4a-dfba-45da-9197-cfbdcbd4ccfe" (UID: "54e23b4a-dfba-45da-9197-cfbdcbd4ccfe"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.096656 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54e23b4a-dfba-45da-9197-cfbdcbd4ccfe-inventory" (OuterVolumeSpecName: "inventory") pod "54e23b4a-dfba-45da-9197-cfbdcbd4ccfe" (UID: "54e23b4a-dfba-45da-9197-cfbdcbd4ccfe"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.169283 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h9l4v\" (UniqueName: \"kubernetes.io/projected/54e23b4a-dfba-45da-9197-cfbdcbd4ccfe-kube-api-access-h9l4v\") on node \"crc\" DevicePath \"\"" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.169326 4840 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/54e23b4a-dfba-45da-9197-cfbdcbd4ccfe-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.169341 4840 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/54e23b4a-dfba-45da-9197-cfbdcbd4ccfe-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.533140 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-thbrf" event={"ID":"54e23b4a-dfba-45da-9197-cfbdcbd4ccfe","Type":"ContainerDied","Data":"b8fb1adc0c6ff1158c30d04636fbfdb60cd12e05114c4e65d2e735fd65754007"} Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.533714 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b8fb1adc0c6ff1158c30d04636fbfdb60cd12e05114c4e65d2e735fd65754007" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.533216 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-thbrf" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.609639 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-96xww"] Dec 05 15:32:00 crc kubenswrapper[4840]: E1205 15:32:00.610041 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54e23b4a-dfba-45da-9197-cfbdcbd4ccfe" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.610061 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="54e23b4a-dfba-45da-9197-cfbdcbd4ccfe" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.610268 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="54e23b4a-dfba-45da-9197-cfbdcbd4ccfe" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.611075 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-96xww" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.614271 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.614482 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.615016 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.615200 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-6c9x2" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.630216 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-96xww"] Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.678618 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0e3c935-42f8-456b-8870-a4ca2f9fce1d-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-96xww\" (UID: \"b0e3c935-42f8-456b-8870-a4ca2f9fce1d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-96xww" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.678765 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8pbkc\" (UniqueName: \"kubernetes.io/projected/b0e3c935-42f8-456b-8870-a4ca2f9fce1d-kube-api-access-8pbkc\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-96xww\" (UID: \"b0e3c935-42f8-456b-8870-a4ca2f9fce1d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-96xww" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.678795 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0e3c935-42f8-456b-8870-a4ca2f9fce1d-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-96xww\" (UID: \"b0e3c935-42f8-456b-8870-a4ca2f9fce1d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-96xww" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.780676 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0e3c935-42f8-456b-8870-a4ca2f9fce1d-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-96xww\" (UID: \"b0e3c935-42f8-456b-8870-a4ca2f9fce1d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-96xww" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.780829 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8pbkc\" (UniqueName: \"kubernetes.io/projected/b0e3c935-42f8-456b-8870-a4ca2f9fce1d-kube-api-access-8pbkc\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-96xww\" (UID: \"b0e3c935-42f8-456b-8870-a4ca2f9fce1d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-96xww" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.780877 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0e3c935-42f8-456b-8870-a4ca2f9fce1d-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-96xww\" (UID: 
\"b0e3c935-42f8-456b-8870-a4ca2f9fce1d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-96xww" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.786508 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0e3c935-42f8-456b-8870-a4ca2f9fce1d-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-96xww\" (UID: \"b0e3c935-42f8-456b-8870-a4ca2f9fce1d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-96xww" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.799076 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0e3c935-42f8-456b-8870-a4ca2f9fce1d-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-96xww\" (UID: \"b0e3c935-42f8-456b-8870-a4ca2f9fce1d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-96xww" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.804304 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8pbkc\" (UniqueName: \"kubernetes.io/projected/b0e3c935-42f8-456b-8870-a4ca2f9fce1d-kube-api-access-8pbkc\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-96xww\" (UID: \"b0e3c935-42f8-456b-8870-a4ca2f9fce1d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-96xww" Dec 05 15:32:00 crc kubenswrapper[4840]: I1205 15:32:00.928079 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-96xww" Dec 05 15:32:01 crc kubenswrapper[4840]: I1205 15:32:01.480890 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-96xww"] Dec 05 15:32:01 crc kubenswrapper[4840]: I1205 15:32:01.541262 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-96xww" event={"ID":"b0e3c935-42f8-456b-8870-a4ca2f9fce1d","Type":"ContainerStarted","Data":"6b0d8963673aa03e829583791209f1a0935f6c7ad7ad159ddf21fc21f481be74"} Dec 05 15:32:02 crc kubenswrapper[4840]: I1205 15:32:02.551458 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-96xww" event={"ID":"b0e3c935-42f8-456b-8870-a4ca2f9fce1d","Type":"ContainerStarted","Data":"c61c2bedc2bb55e1e4527ca6bd4e4c5160d2099348d7195c8a198c527a0e840a"} Dec 05 15:32:02 crc kubenswrapper[4840]: I1205 15:32:02.582110 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-96xww" podStartSLOduration=2.163371902 podStartE2EDuration="2.582088378s" podCreationTimestamp="2025-12-05 15:32:00 +0000 UTC" firstStartedPulling="2025-12-05 15:32:01.493355699 +0000 UTC m=+1999.834418313" lastFinishedPulling="2025-12-05 15:32:01.912072175 +0000 UTC m=+2000.253134789" observedRunningTime="2025-12-05 15:32:02.574850263 +0000 UTC m=+2000.915912887" watchObservedRunningTime="2025-12-05 15:32:02.582088378 +0000 UTC m=+2000.923151002" Dec 05 15:32:12 crc kubenswrapper[4840]: I1205 15:32:12.657883 4840 generic.go:334] "Generic (PLEG): container finished" podID="b0e3c935-42f8-456b-8870-a4ca2f9fce1d" containerID="c61c2bedc2bb55e1e4527ca6bd4e4c5160d2099348d7195c8a198c527a0e840a" exitCode=0 Dec 05 15:32:12 crc kubenswrapper[4840]: I1205 15:32:12.658085 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-96xww" 
event={"ID":"b0e3c935-42f8-456b-8870-a4ca2f9fce1d","Type":"ContainerDied","Data":"c61c2bedc2bb55e1e4527ca6bd4e4c5160d2099348d7195c8a198c527a0e840a"} Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.074685 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-96xww" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.083766 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0e3c935-42f8-456b-8870-a4ca2f9fce1d-inventory\") pod \"b0e3c935-42f8-456b-8870-a4ca2f9fce1d\" (UID: \"b0e3c935-42f8-456b-8870-a4ca2f9fce1d\") " Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.083992 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0e3c935-42f8-456b-8870-a4ca2f9fce1d-ssh-key\") pod \"b0e3c935-42f8-456b-8870-a4ca2f9fce1d\" (UID: \"b0e3c935-42f8-456b-8870-a4ca2f9fce1d\") " Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.084137 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8pbkc\" (UniqueName: \"kubernetes.io/projected/b0e3c935-42f8-456b-8870-a4ca2f9fce1d-kube-api-access-8pbkc\") pod \"b0e3c935-42f8-456b-8870-a4ca2f9fce1d\" (UID: \"b0e3c935-42f8-456b-8870-a4ca2f9fce1d\") " Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.097341 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0e3c935-42f8-456b-8870-a4ca2f9fce1d-kube-api-access-8pbkc" (OuterVolumeSpecName: "kube-api-access-8pbkc") pod "b0e3c935-42f8-456b-8870-a4ca2f9fce1d" (UID: "b0e3c935-42f8-456b-8870-a4ca2f9fce1d"). InnerVolumeSpecName "kube-api-access-8pbkc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.115518 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0e3c935-42f8-456b-8870-a4ca2f9fce1d-inventory" (OuterVolumeSpecName: "inventory") pod "b0e3c935-42f8-456b-8870-a4ca2f9fce1d" (UID: "b0e3c935-42f8-456b-8870-a4ca2f9fce1d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.120138 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0e3c935-42f8-456b-8870-a4ca2f9fce1d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b0e3c935-42f8-456b-8870-a4ca2f9fce1d" (UID: "b0e3c935-42f8-456b-8870-a4ca2f9fce1d"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.186365 4840 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0e3c935-42f8-456b-8870-a4ca2f9fce1d-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.186409 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8pbkc\" (UniqueName: \"kubernetes.io/projected/b0e3c935-42f8-456b-8870-a4ca2f9fce1d-kube-api-access-8pbkc\") on node \"crc\" DevicePath \"\"" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.186424 4840 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0e3c935-42f8-456b-8870-a4ca2f9fce1d-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.675945 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-96xww" event={"ID":"b0e3c935-42f8-456b-8870-a4ca2f9fce1d","Type":"ContainerDied","Data":"6b0d8963673aa03e829583791209f1a0935f6c7ad7ad159ddf21fc21f481be74"} Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.675994 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b0d8963673aa03e829583791209f1a0935f6c7ad7ad159ddf21fc21f481be74" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.676011 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-96xww" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.824373 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv"] Dec 05 15:32:14 crc kubenswrapper[4840]: E1205 15:32:14.825118 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0e3c935-42f8-456b-8870-a4ca2f9fce1d" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.825142 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0e3c935-42f8-456b-8870-a4ca2f9fce1d" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.825341 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0e3c935-42f8-456b-8870-a4ca2f9fce1d" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.826012 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.829084 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.829304 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.829459 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.829705 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.829845 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-6c9x2" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.830133 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.830292 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.830429 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.847904 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv"] Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.898937 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.898996 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.899094 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.899134 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.899170 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.899291 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.899383 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.899446 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.899493 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7x444\" (UniqueName: \"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-kube-api-access-7x444\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.899520 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.899544 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.899606 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.899778 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:14 crc kubenswrapper[4840]: I1205 15:32:14.899855 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.000949 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.001318 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.001350 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.001371 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.001407 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.001435 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.001466 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.001494 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.001517 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7x444\" (UniqueName: \"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-kube-api-access-7x444\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.001567 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.001586 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.001622 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-repo-setup-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.001659 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.001678 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.006512 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.006711 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.008608 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.010168 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.010225 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.010350 4840 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.011000 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.011001 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.012464 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.012954 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.013661 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.014592 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.016623 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: 
\"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.028052 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7x444\" (UniqueName: \"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-kube-api-access-7x444\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-97vkv\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.198545 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:15 crc kubenswrapper[4840]: I1205 15:32:15.705688 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv"] Dec 05 15:32:16 crc kubenswrapper[4840]: I1205 15:32:16.693559 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" event={"ID":"d89744d6-5d83-4152-8a17-ab5bddf86ad9","Type":"ContainerStarted","Data":"72d69986b1a52f514d7247998beb1948028681e0ccd6504bc0fc52336528cb9b"} Dec 05 15:32:16 crc kubenswrapper[4840]: I1205 15:32:16.693909 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" event={"ID":"d89744d6-5d83-4152-8a17-ab5bddf86ad9","Type":"ContainerStarted","Data":"10cc6e19f7eff2be46f20631ff969a92f83fa345ff66fe173023c33b7dee1fc9"} Dec 05 15:32:16 crc kubenswrapper[4840]: I1205 15:32:16.725071 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" podStartSLOduration=2.307230922 podStartE2EDuration="2.725048553s" podCreationTimestamp="2025-12-05 15:32:14 +0000 UTC" firstStartedPulling="2025-12-05 15:32:15.720609651 +0000 UTC m=+2014.061672265" lastFinishedPulling="2025-12-05 15:32:16.138427292 +0000 UTC m=+2014.479489896" observedRunningTime="2025-12-05 15:32:16.716965154 +0000 UTC m=+2015.058027788" watchObservedRunningTime="2025-12-05 15:32:16.725048553 +0000 UTC m=+2015.066111167" Dec 05 15:32:19 crc kubenswrapper[4840]: I1205 15:32:19.471771 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:32:19 crc kubenswrapper[4840]: I1205 15:32:19.472580 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:32:47 crc kubenswrapper[4840]: I1205 15:32:47.007047 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gl2qq"] Dec 05 15:32:47 crc kubenswrapper[4840]: I1205 15:32:47.015322 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gl2qq" Dec 05 15:32:47 crc kubenswrapper[4840]: I1205 15:32:47.023386 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gl2qq"] Dec 05 15:32:47 crc kubenswrapper[4840]: I1205 15:32:47.096245 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2g7dm\" (UniqueName: \"kubernetes.io/projected/5efc8997-de80-4d92-bed4-b91a75d3dc61-kube-api-access-2g7dm\") pod \"redhat-operators-gl2qq\" (UID: \"5efc8997-de80-4d92-bed4-b91a75d3dc61\") " pod="openshift-marketplace/redhat-operators-gl2qq" Dec 05 15:32:47 crc kubenswrapper[4840]: I1205 15:32:47.096320 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5efc8997-de80-4d92-bed4-b91a75d3dc61-utilities\") pod \"redhat-operators-gl2qq\" (UID: \"5efc8997-de80-4d92-bed4-b91a75d3dc61\") " pod="openshift-marketplace/redhat-operators-gl2qq" Dec 05 15:32:47 crc kubenswrapper[4840]: I1205 15:32:47.096439 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5efc8997-de80-4d92-bed4-b91a75d3dc61-catalog-content\") pod \"redhat-operators-gl2qq\" (UID: \"5efc8997-de80-4d92-bed4-b91a75d3dc61\") " pod="openshift-marketplace/redhat-operators-gl2qq" Dec 05 15:32:47 crc kubenswrapper[4840]: I1205 15:32:47.198683 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2g7dm\" (UniqueName: \"kubernetes.io/projected/5efc8997-de80-4d92-bed4-b91a75d3dc61-kube-api-access-2g7dm\") pod \"redhat-operators-gl2qq\" (UID: \"5efc8997-de80-4d92-bed4-b91a75d3dc61\") " pod="openshift-marketplace/redhat-operators-gl2qq" Dec 05 15:32:47 crc kubenswrapper[4840]: I1205 15:32:47.198988 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5efc8997-de80-4d92-bed4-b91a75d3dc61-utilities\") pod \"redhat-operators-gl2qq\" (UID: \"5efc8997-de80-4d92-bed4-b91a75d3dc61\") " pod="openshift-marketplace/redhat-operators-gl2qq" Dec 05 15:32:47 crc kubenswrapper[4840]: I1205 15:32:47.199055 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5efc8997-de80-4d92-bed4-b91a75d3dc61-catalog-content\") pod \"redhat-operators-gl2qq\" (UID: \"5efc8997-de80-4d92-bed4-b91a75d3dc61\") " pod="openshift-marketplace/redhat-operators-gl2qq" Dec 05 15:32:47 crc kubenswrapper[4840]: I1205 15:32:47.199510 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5efc8997-de80-4d92-bed4-b91a75d3dc61-utilities\") pod \"redhat-operators-gl2qq\" (UID: \"5efc8997-de80-4d92-bed4-b91a75d3dc61\") " pod="openshift-marketplace/redhat-operators-gl2qq" Dec 05 15:32:47 crc kubenswrapper[4840]: I1205 15:32:47.199621 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5efc8997-de80-4d92-bed4-b91a75d3dc61-catalog-content\") pod \"redhat-operators-gl2qq\" (UID: \"5efc8997-de80-4d92-bed4-b91a75d3dc61\") " pod="openshift-marketplace/redhat-operators-gl2qq" Dec 05 15:32:47 crc kubenswrapper[4840]: I1205 15:32:47.219881 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-2g7dm\" (UniqueName: \"kubernetes.io/projected/5efc8997-de80-4d92-bed4-b91a75d3dc61-kube-api-access-2g7dm\") pod \"redhat-operators-gl2qq\" (UID: \"5efc8997-de80-4d92-bed4-b91a75d3dc61\") " pod="openshift-marketplace/redhat-operators-gl2qq" Dec 05 15:32:47 crc kubenswrapper[4840]: I1205 15:32:47.341229 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gl2qq" Dec 05 15:32:47 crc kubenswrapper[4840]: I1205 15:32:47.898076 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gl2qq"] Dec 05 15:32:47 crc kubenswrapper[4840]: I1205 15:32:47.964560 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gl2qq" event={"ID":"5efc8997-de80-4d92-bed4-b91a75d3dc61","Type":"ContainerStarted","Data":"f7cf4173818fe42cc52d58bb52a8c824c7037b87278212237eaf1f72af7c6031"} Dec 05 15:32:48 crc kubenswrapper[4840]: I1205 15:32:48.976037 4840 generic.go:334] "Generic (PLEG): container finished" podID="5efc8997-de80-4d92-bed4-b91a75d3dc61" containerID="5b12cf1f7b880fa08159542023d631e9b0b6b23c03ce37022794591c31fb17eb" exitCode=0 Dec 05 15:32:48 crc kubenswrapper[4840]: I1205 15:32:48.976230 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gl2qq" event={"ID":"5efc8997-de80-4d92-bed4-b91a75d3dc61","Type":"ContainerDied","Data":"5b12cf1f7b880fa08159542023d631e9b0b6b23c03ce37022794591c31fb17eb"} Dec 05 15:32:49 crc kubenswrapper[4840]: I1205 15:32:49.471668 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:32:49 crc kubenswrapper[4840]: I1205 15:32:49.472107 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:32:49 crc kubenswrapper[4840]: I1205 15:32:49.986992 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gl2qq" event={"ID":"5efc8997-de80-4d92-bed4-b91a75d3dc61","Type":"ContainerStarted","Data":"5183285b95722ead1a43dd4956bd00d3563b210dca928a24cb67a910f8572dd7"} Dec 05 15:32:53 crc kubenswrapper[4840]: I1205 15:32:53.013051 4840 generic.go:334] "Generic (PLEG): container finished" podID="5efc8997-de80-4d92-bed4-b91a75d3dc61" containerID="5183285b95722ead1a43dd4956bd00d3563b210dca928a24cb67a910f8572dd7" exitCode=0 Dec 05 15:32:53 crc kubenswrapper[4840]: I1205 15:32:53.013124 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gl2qq" event={"ID":"5efc8997-de80-4d92-bed4-b91a75d3dc61","Type":"ContainerDied","Data":"5183285b95722ead1a43dd4956bd00d3563b210dca928a24cb67a910f8572dd7"} Dec 05 15:32:55 crc kubenswrapper[4840]: I1205 15:32:55.033671 4840 generic.go:334] "Generic (PLEG): container finished" podID="d89744d6-5d83-4152-8a17-ab5bddf86ad9" containerID="72d69986b1a52f514d7247998beb1948028681e0ccd6504bc0fc52336528cb9b" exitCode=0 Dec 05 15:32:55 crc kubenswrapper[4840]: I1205 15:32:55.033760 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" event={"ID":"d89744d6-5d83-4152-8a17-ab5bddf86ad9","Type":"ContainerDied","Data":"72d69986b1a52f514d7247998beb1948028681e0ccd6504bc0fc52336528cb9b"} Dec 05 15:32:55 crc kubenswrapper[4840]: I1205 15:32:55.036781 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gl2qq" event={"ID":"5efc8997-de80-4d92-bed4-b91a75d3dc61","Type":"ContainerStarted","Data":"61a7ada4da4f923cc112d49fcfd93ba3b1d102871f063043d79a07476b1d7e1e"} Dec 05 15:32:55 crc kubenswrapper[4840]: I1205 15:32:55.078098 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gl2qq" podStartSLOduration=3.973889097 podStartE2EDuration="9.078076632s" podCreationTimestamp="2025-12-05 15:32:46 +0000 UTC" firstStartedPulling="2025-12-05 15:32:48.979057207 +0000 UTC m=+2047.320119821" lastFinishedPulling="2025-12-05 15:32:54.083244742 +0000 UTC m=+2052.424307356" observedRunningTime="2025-12-05 15:32:55.070650952 +0000 UTC m=+2053.411713566" watchObservedRunningTime="2025-12-05 15:32:55.078076632 +0000 UTC m=+2053.419139236" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.491284 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.615852 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-inventory\") pod \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.615959 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7x444\" (UniqueName: \"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-kube-api-access-7x444\") pod \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.615983 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.616009 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-ovn-combined-ca-bundle\") pod \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.616025 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-neutron-metadata-combined-ca-bundle\") pod \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.616047 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.616073 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-repo-setup-combined-ca-bundle\") pod \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.616121 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-bootstrap-combined-ca-bundle\") pod \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.616201 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-openstack-edpm-ipam-ovn-default-certs-0\") pod \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.616359 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.617189 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-telemetry-combined-ca-bundle\") pod \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.617499 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-libvirt-combined-ca-bundle\") pod \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.617566 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-ssh-key\") pod \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.617615 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-nova-combined-ca-bundle\") pod \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\" (UID: \"d89744d6-5d83-4152-8a17-ab5bddf86ad9\") " Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.624015 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: 
"openstack-edpm-ipam-libvirt-default-certs-0") pod "d89744d6-5d83-4152-8a17-ab5bddf86ad9" (UID: "d89744d6-5d83-4152-8a17-ab5bddf86ad9"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.624346 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "d89744d6-5d83-4152-8a17-ab5bddf86ad9" (UID: "d89744d6-5d83-4152-8a17-ab5bddf86ad9"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.624402 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "d89744d6-5d83-4152-8a17-ab5bddf86ad9" (UID: "d89744d6-5d83-4152-8a17-ab5bddf86ad9"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.625215 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "d89744d6-5d83-4152-8a17-ab5bddf86ad9" (UID: "d89744d6-5d83-4152-8a17-ab5bddf86ad9"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.625208 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "d89744d6-5d83-4152-8a17-ab5bddf86ad9" (UID: "d89744d6-5d83-4152-8a17-ab5bddf86ad9"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.625263 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "d89744d6-5d83-4152-8a17-ab5bddf86ad9" (UID: "d89744d6-5d83-4152-8a17-ab5bddf86ad9"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.625833 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "d89744d6-5d83-4152-8a17-ab5bddf86ad9" (UID: "d89744d6-5d83-4152-8a17-ab5bddf86ad9"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.626199 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "d89744d6-5d83-4152-8a17-ab5bddf86ad9" (UID: "d89744d6-5d83-4152-8a17-ab5bddf86ad9"). 
InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.627524 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-kube-api-access-7x444" (OuterVolumeSpecName: "kube-api-access-7x444") pod "d89744d6-5d83-4152-8a17-ab5bddf86ad9" (UID: "d89744d6-5d83-4152-8a17-ab5bddf86ad9"). InnerVolumeSpecName "kube-api-access-7x444". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.628292 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "d89744d6-5d83-4152-8a17-ab5bddf86ad9" (UID: "d89744d6-5d83-4152-8a17-ab5bddf86ad9"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.630209 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "d89744d6-5d83-4152-8a17-ab5bddf86ad9" (UID: "d89744d6-5d83-4152-8a17-ab5bddf86ad9"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.631228 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "d89744d6-5d83-4152-8a17-ab5bddf86ad9" (UID: "d89744d6-5d83-4152-8a17-ab5bddf86ad9"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.656058 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d89744d6-5d83-4152-8a17-ab5bddf86ad9" (UID: "d89744d6-5d83-4152-8a17-ab5bddf86ad9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.656446 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-inventory" (OuterVolumeSpecName: "inventory") pod "d89744d6-5d83-4152-8a17-ab5bddf86ad9" (UID: "d89744d6-5d83-4152-8a17-ab5bddf86ad9"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.719834 4840 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.719895 4840 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.719909 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7x444\" (UniqueName: \"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-kube-api-access-7x444\") on node \"crc\" DevicePath \"\"" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.719923 4840 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.719939 4840 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.719952 4840 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.719965 4840 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.719989 4840 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.720004 4840 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.720017 4840 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.720030 4840 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/d89744d6-5d83-4152-8a17-ab5bddf86ad9-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.720044 4840 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.720058 4840 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:32:56 crc kubenswrapper[4840]: I1205 15:32:56.720083 4840 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d89744d6-5d83-4152-8a17-ab5bddf86ad9-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.103160 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" event={"ID":"d89744d6-5d83-4152-8a17-ab5bddf86ad9","Type":"ContainerDied","Data":"10cc6e19f7eff2be46f20631ff969a92f83fa345ff66fe173023c33b7dee1fc9"} Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.103218 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="10cc6e19f7eff2be46f20631ff969a92f83fa345ff66fe173023c33b7dee1fc9" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.103273 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-97vkv" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.209477 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc"] Dec 05 15:32:57 crc kubenswrapper[4840]: E1205 15:32:57.210184 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d89744d6-5d83-4152-8a17-ab5bddf86ad9" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.210319 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="d89744d6-5d83-4152-8a17-ab5bddf86ad9" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.210805 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="d89744d6-5d83-4152-8a17-ab5bddf86ad9" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.211889 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.217499 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-6c9x2" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.217924 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.218169 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.218372 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.218546 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.230110 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc"] Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.333959 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d0c216f7-ce43-4852-9788-e1f5e5705ec4-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sclgc\" (UID: \"d0c216f7-ce43-4852-9788-e1f5e5705ec4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.334058 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjznr\" (UniqueName: \"kubernetes.io/projected/d0c216f7-ce43-4852-9788-e1f5e5705ec4-kube-api-access-sjznr\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sclgc\" (UID: \"d0c216f7-ce43-4852-9788-e1f5e5705ec4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.334141 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/d0c216f7-ce43-4852-9788-e1f5e5705ec4-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sclgc\" (UID: \"d0c216f7-ce43-4852-9788-e1f5e5705ec4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.334308 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0c216f7-ce43-4852-9788-e1f5e5705ec4-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sclgc\" (UID: \"d0c216f7-ce43-4852-9788-e1f5e5705ec4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.334428 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d0c216f7-ce43-4852-9788-e1f5e5705ec4-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sclgc\" (UID: \"d0c216f7-ce43-4852-9788-e1f5e5705ec4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.342384 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-operators-gl2qq" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.342427 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gl2qq" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.492350 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d0c216f7-ce43-4852-9788-e1f5e5705ec4-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sclgc\" (UID: \"d0c216f7-ce43-4852-9788-e1f5e5705ec4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.492456 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjznr\" (UniqueName: \"kubernetes.io/projected/d0c216f7-ce43-4852-9788-e1f5e5705ec4-kube-api-access-sjznr\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sclgc\" (UID: \"d0c216f7-ce43-4852-9788-e1f5e5705ec4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.492580 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/d0c216f7-ce43-4852-9788-e1f5e5705ec4-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sclgc\" (UID: \"d0c216f7-ce43-4852-9788-e1f5e5705ec4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.492646 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0c216f7-ce43-4852-9788-e1f5e5705ec4-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sclgc\" (UID: \"d0c216f7-ce43-4852-9788-e1f5e5705ec4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.492695 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d0c216f7-ce43-4852-9788-e1f5e5705ec4-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sclgc\" (UID: \"d0c216f7-ce43-4852-9788-e1f5e5705ec4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.501012 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/d0c216f7-ce43-4852-9788-e1f5e5705ec4-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sclgc\" (UID: \"d0c216f7-ce43-4852-9788-e1f5e5705ec4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.505442 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0c216f7-ce43-4852-9788-e1f5e5705ec4-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sclgc\" (UID: \"d0c216f7-ce43-4852-9788-e1f5e5705ec4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.508615 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d0c216f7-ce43-4852-9788-e1f5e5705ec4-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sclgc\" (UID: 
\"d0c216f7-ce43-4852-9788-e1f5e5705ec4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.510020 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d0c216f7-ce43-4852-9788-e1f5e5705ec4-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sclgc\" (UID: \"d0c216f7-ce43-4852-9788-e1f5e5705ec4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.512416 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjznr\" (UniqueName: \"kubernetes.io/projected/d0c216f7-ce43-4852-9788-e1f5e5705ec4-kube-api-access-sjznr\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-sclgc\" (UID: \"d0c216f7-ce43-4852-9788-e1f5e5705ec4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc" Dec 05 15:32:57 crc kubenswrapper[4840]: I1205 15:32:57.534294 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc" Dec 05 15:32:58 crc kubenswrapper[4840]: I1205 15:32:58.099890 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc"] Dec 05 15:32:58 crc kubenswrapper[4840]: I1205 15:32:58.388477 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-gl2qq" podUID="5efc8997-de80-4d92-bed4-b91a75d3dc61" containerName="registry-server" probeResult="failure" output=< Dec 05 15:32:58 crc kubenswrapper[4840]: timeout: failed to connect service ":50051" within 1s Dec 05 15:32:58 crc kubenswrapper[4840]: > Dec 05 15:32:59 crc kubenswrapper[4840]: I1205 15:32:59.121166 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc" event={"ID":"d0c216f7-ce43-4852-9788-e1f5e5705ec4","Type":"ContainerStarted","Data":"9e9a99f073ef64abeb5cbf158bbd99009fada3f2a4fc97729a883678bb3405da"} Dec 05 15:32:59 crc kubenswrapper[4840]: I1205 15:32:59.122722 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc" event={"ID":"d0c216f7-ce43-4852-9788-e1f5e5705ec4","Type":"ContainerStarted","Data":"66d1b140fe0d877e534d373925ebdbc74ef8ccb8ac330868ea8786be3b93691a"} Dec 05 15:32:59 crc kubenswrapper[4840]: I1205 15:32:59.138353 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc" podStartSLOduration=1.501909834 podStartE2EDuration="2.138327225s" podCreationTimestamp="2025-12-05 15:32:57 +0000 UTC" firstStartedPulling="2025-12-05 15:32:58.109603856 +0000 UTC m=+2056.450666470" lastFinishedPulling="2025-12-05 15:32:58.746021247 +0000 UTC m=+2057.087083861" observedRunningTime="2025-12-05 15:32:59.135189556 +0000 UTC m=+2057.476252190" watchObservedRunningTime="2025-12-05 15:32:59.138327225 +0000 UTC m=+2057.479389839" Dec 05 15:33:07 crc kubenswrapper[4840]: I1205 15:33:07.416327 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gl2qq" Dec 05 15:33:07 crc kubenswrapper[4840]: I1205 15:33:07.483306 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gl2qq" Dec 05 15:33:07 crc kubenswrapper[4840]: I1205 15:33:07.658097 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/redhat-operators-gl2qq"] Dec 05 15:33:09 crc kubenswrapper[4840]: I1205 15:33:09.224822 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gl2qq" podUID="5efc8997-de80-4d92-bed4-b91a75d3dc61" containerName="registry-server" containerID="cri-o://61a7ada4da4f923cc112d49fcfd93ba3b1d102871f063043d79a07476b1d7e1e" gracePeriod=2 Dec 05 15:33:09 crc kubenswrapper[4840]: I1205 15:33:09.641318 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gl2qq" Dec 05 15:33:09 crc kubenswrapper[4840]: I1205 15:33:09.753970 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2g7dm\" (UniqueName: \"kubernetes.io/projected/5efc8997-de80-4d92-bed4-b91a75d3dc61-kube-api-access-2g7dm\") pod \"5efc8997-de80-4d92-bed4-b91a75d3dc61\" (UID: \"5efc8997-de80-4d92-bed4-b91a75d3dc61\") " Dec 05 15:33:09 crc kubenswrapper[4840]: I1205 15:33:09.754083 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5efc8997-de80-4d92-bed4-b91a75d3dc61-utilities\") pod \"5efc8997-de80-4d92-bed4-b91a75d3dc61\" (UID: \"5efc8997-de80-4d92-bed4-b91a75d3dc61\") " Dec 05 15:33:09 crc kubenswrapper[4840]: I1205 15:33:09.754130 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5efc8997-de80-4d92-bed4-b91a75d3dc61-catalog-content\") pod \"5efc8997-de80-4d92-bed4-b91a75d3dc61\" (UID: \"5efc8997-de80-4d92-bed4-b91a75d3dc61\") " Dec 05 15:33:09 crc kubenswrapper[4840]: I1205 15:33:09.755223 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5efc8997-de80-4d92-bed4-b91a75d3dc61-utilities" (OuterVolumeSpecName: "utilities") pod "5efc8997-de80-4d92-bed4-b91a75d3dc61" (UID: "5efc8997-de80-4d92-bed4-b91a75d3dc61"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:33:09 crc kubenswrapper[4840]: I1205 15:33:09.776216 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5efc8997-de80-4d92-bed4-b91a75d3dc61-kube-api-access-2g7dm" (OuterVolumeSpecName: "kube-api-access-2g7dm") pod "5efc8997-de80-4d92-bed4-b91a75d3dc61" (UID: "5efc8997-de80-4d92-bed4-b91a75d3dc61"). InnerVolumeSpecName "kube-api-access-2g7dm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:33:09 crc kubenswrapper[4840]: I1205 15:33:09.856991 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2g7dm\" (UniqueName: \"kubernetes.io/projected/5efc8997-de80-4d92-bed4-b91a75d3dc61-kube-api-access-2g7dm\") on node \"crc\" DevicePath \"\"" Dec 05 15:33:09 crc kubenswrapper[4840]: I1205 15:33:09.857033 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5efc8997-de80-4d92-bed4-b91a75d3dc61-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 15:33:09 crc kubenswrapper[4840]: I1205 15:33:09.864658 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5efc8997-de80-4d92-bed4-b91a75d3dc61-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5efc8997-de80-4d92-bed4-b91a75d3dc61" (UID: "5efc8997-de80-4d92-bed4-b91a75d3dc61"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:33:09 crc kubenswrapper[4840]: I1205 15:33:09.958646 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5efc8997-de80-4d92-bed4-b91a75d3dc61-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 15:33:10 crc kubenswrapper[4840]: I1205 15:33:10.245283 4840 generic.go:334] "Generic (PLEG): container finished" podID="5efc8997-de80-4d92-bed4-b91a75d3dc61" containerID="61a7ada4da4f923cc112d49fcfd93ba3b1d102871f063043d79a07476b1d7e1e" exitCode=0 Dec 05 15:33:10 crc kubenswrapper[4840]: I1205 15:33:10.245348 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gl2qq" event={"ID":"5efc8997-de80-4d92-bed4-b91a75d3dc61","Type":"ContainerDied","Data":"61a7ada4da4f923cc112d49fcfd93ba3b1d102871f063043d79a07476b1d7e1e"} Dec 05 15:33:10 crc kubenswrapper[4840]: I1205 15:33:10.245377 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gl2qq" event={"ID":"5efc8997-de80-4d92-bed4-b91a75d3dc61","Type":"ContainerDied","Data":"f7cf4173818fe42cc52d58bb52a8c824c7037b87278212237eaf1f72af7c6031"} Dec 05 15:33:10 crc kubenswrapper[4840]: I1205 15:33:10.245396 4840 scope.go:117] "RemoveContainer" containerID="61a7ada4da4f923cc112d49fcfd93ba3b1d102871f063043d79a07476b1d7e1e" Dec 05 15:33:10 crc kubenswrapper[4840]: I1205 15:33:10.245701 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gl2qq" Dec 05 15:33:10 crc kubenswrapper[4840]: I1205 15:33:10.282782 4840 scope.go:117] "RemoveContainer" containerID="5183285b95722ead1a43dd4956bd00d3563b210dca928a24cb67a910f8572dd7" Dec 05 15:33:10 crc kubenswrapper[4840]: I1205 15:33:10.283789 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gl2qq"] Dec 05 15:33:10 crc kubenswrapper[4840]: I1205 15:33:10.291435 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gl2qq"] Dec 05 15:33:10 crc kubenswrapper[4840]: I1205 15:33:10.309056 4840 scope.go:117] "RemoveContainer" containerID="5b12cf1f7b880fa08159542023d631e9b0b6b23c03ce37022794591c31fb17eb" Dec 05 15:33:10 crc kubenswrapper[4840]: I1205 15:33:10.349376 4840 scope.go:117] "RemoveContainer" containerID="61a7ada4da4f923cc112d49fcfd93ba3b1d102871f063043d79a07476b1d7e1e" Dec 05 15:33:10 crc kubenswrapper[4840]: E1205 15:33:10.349978 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61a7ada4da4f923cc112d49fcfd93ba3b1d102871f063043d79a07476b1d7e1e\": container with ID starting with 61a7ada4da4f923cc112d49fcfd93ba3b1d102871f063043d79a07476b1d7e1e not found: ID does not exist" containerID="61a7ada4da4f923cc112d49fcfd93ba3b1d102871f063043d79a07476b1d7e1e" Dec 05 15:33:10 crc kubenswrapper[4840]: I1205 15:33:10.350045 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61a7ada4da4f923cc112d49fcfd93ba3b1d102871f063043d79a07476b1d7e1e"} err="failed to get container status \"61a7ada4da4f923cc112d49fcfd93ba3b1d102871f063043d79a07476b1d7e1e\": rpc error: code = NotFound desc = could not find container \"61a7ada4da4f923cc112d49fcfd93ba3b1d102871f063043d79a07476b1d7e1e\": container with ID starting with 61a7ada4da4f923cc112d49fcfd93ba3b1d102871f063043d79a07476b1d7e1e not found: ID does not exist" Dec 05 15:33:10 crc 
kubenswrapper[4840]: I1205 15:33:10.350077 4840 scope.go:117] "RemoveContainer" containerID="5183285b95722ead1a43dd4956bd00d3563b210dca928a24cb67a910f8572dd7" Dec 05 15:33:10 crc kubenswrapper[4840]: E1205 15:33:10.350460 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5183285b95722ead1a43dd4956bd00d3563b210dca928a24cb67a910f8572dd7\": container with ID starting with 5183285b95722ead1a43dd4956bd00d3563b210dca928a24cb67a910f8572dd7 not found: ID does not exist" containerID="5183285b95722ead1a43dd4956bd00d3563b210dca928a24cb67a910f8572dd7" Dec 05 15:33:10 crc kubenswrapper[4840]: I1205 15:33:10.350483 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5183285b95722ead1a43dd4956bd00d3563b210dca928a24cb67a910f8572dd7"} err="failed to get container status \"5183285b95722ead1a43dd4956bd00d3563b210dca928a24cb67a910f8572dd7\": rpc error: code = NotFound desc = could not find container \"5183285b95722ead1a43dd4956bd00d3563b210dca928a24cb67a910f8572dd7\": container with ID starting with 5183285b95722ead1a43dd4956bd00d3563b210dca928a24cb67a910f8572dd7 not found: ID does not exist" Dec 05 15:33:10 crc kubenswrapper[4840]: I1205 15:33:10.350496 4840 scope.go:117] "RemoveContainer" containerID="5b12cf1f7b880fa08159542023d631e9b0b6b23c03ce37022794591c31fb17eb" Dec 05 15:33:10 crc kubenswrapper[4840]: E1205 15:33:10.350671 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b12cf1f7b880fa08159542023d631e9b0b6b23c03ce37022794591c31fb17eb\": container with ID starting with 5b12cf1f7b880fa08159542023d631e9b0b6b23c03ce37022794591c31fb17eb not found: ID does not exist" containerID="5b12cf1f7b880fa08159542023d631e9b0b6b23c03ce37022794591c31fb17eb" Dec 05 15:33:10 crc kubenswrapper[4840]: I1205 15:33:10.350689 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b12cf1f7b880fa08159542023d631e9b0b6b23c03ce37022794591c31fb17eb"} err="failed to get container status \"5b12cf1f7b880fa08159542023d631e9b0b6b23c03ce37022794591c31fb17eb\": rpc error: code = NotFound desc = could not find container \"5b12cf1f7b880fa08159542023d631e9b0b6b23c03ce37022794591c31fb17eb\": container with ID starting with 5b12cf1f7b880fa08159542023d631e9b0b6b23c03ce37022794591c31fb17eb not found: ID does not exist" Dec 05 15:33:12 crc kubenswrapper[4840]: I1205 15:33:12.078108 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5efc8997-de80-4d92-bed4-b91a75d3dc61" path="/var/lib/kubelet/pods/5efc8997-de80-4d92-bed4-b91a75d3dc61/volumes" Dec 05 15:33:19 crc kubenswrapper[4840]: I1205 15:33:19.472268 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:33:19 crc kubenswrapper[4840]: I1205 15:33:19.472792 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:33:19 crc kubenswrapper[4840]: I1205 15:33:19.472841 4840 kubelet.go:2542] "SyncLoop (probe)" 
probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" Dec 05 15:33:19 crc kubenswrapper[4840]: I1205 15:33:19.473644 4840 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4326a1eba95fff009410270bad695965f3fad46dfb07fed1bafd6c12a2802aa3"} pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 15:33:19 crc kubenswrapper[4840]: I1205 15:33:19.473696 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" containerID="cri-o://4326a1eba95fff009410270bad695965f3fad46dfb07fed1bafd6c12a2802aa3" gracePeriod=600 Dec 05 15:33:20 crc kubenswrapper[4840]: I1205 15:33:20.339966 4840 generic.go:334] "Generic (PLEG): container finished" podID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerID="4326a1eba95fff009410270bad695965f3fad46dfb07fed1bafd6c12a2802aa3" exitCode=0 Dec 05 15:33:20 crc kubenswrapper[4840]: I1205 15:33:20.340841 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerDied","Data":"4326a1eba95fff009410270bad695965f3fad46dfb07fed1bafd6c12a2802aa3"} Dec 05 15:33:20 crc kubenswrapper[4840]: I1205 15:33:20.340986 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerStarted","Data":"9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c"} Dec 05 15:33:20 crc kubenswrapper[4840]: I1205 15:33:20.341042 4840 scope.go:117] "RemoveContainer" containerID="9087e00921c7c0746b6dde6161dd2c8a03a4c7459236d23bbe8965853de5f49b" Dec 05 15:33:58 crc kubenswrapper[4840]: I1205 15:33:58.700377 4840 generic.go:334] "Generic (PLEG): container finished" podID="d0c216f7-ce43-4852-9788-e1f5e5705ec4" containerID="9e9a99f073ef64abeb5cbf158bbd99009fada3f2a4fc97729a883678bb3405da" exitCode=0 Dec 05 15:33:58 crc kubenswrapper[4840]: I1205 15:33:58.700474 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc" event={"ID":"d0c216f7-ce43-4852-9788-e1f5e5705ec4","Type":"ContainerDied","Data":"9e9a99f073ef64abeb5cbf158bbd99009fada3f2a4fc97729a883678bb3405da"} Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.144338 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.281996 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d0c216f7-ce43-4852-9788-e1f5e5705ec4-inventory\") pod \"d0c216f7-ce43-4852-9788-e1f5e5705ec4\" (UID: \"d0c216f7-ce43-4852-9788-e1f5e5705ec4\") " Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.282163 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjznr\" (UniqueName: \"kubernetes.io/projected/d0c216f7-ce43-4852-9788-e1f5e5705ec4-kube-api-access-sjznr\") pod \"d0c216f7-ce43-4852-9788-e1f5e5705ec4\" (UID: \"d0c216f7-ce43-4852-9788-e1f5e5705ec4\") " Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.282373 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0c216f7-ce43-4852-9788-e1f5e5705ec4-ovn-combined-ca-bundle\") pod \"d0c216f7-ce43-4852-9788-e1f5e5705ec4\" (UID: \"d0c216f7-ce43-4852-9788-e1f5e5705ec4\") " Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.282408 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d0c216f7-ce43-4852-9788-e1f5e5705ec4-ssh-key\") pod \"d0c216f7-ce43-4852-9788-e1f5e5705ec4\" (UID: \"d0c216f7-ce43-4852-9788-e1f5e5705ec4\") " Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.282439 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/d0c216f7-ce43-4852-9788-e1f5e5705ec4-ovncontroller-config-0\") pod \"d0c216f7-ce43-4852-9788-e1f5e5705ec4\" (UID: \"d0c216f7-ce43-4852-9788-e1f5e5705ec4\") " Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.289795 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0c216f7-ce43-4852-9788-e1f5e5705ec4-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "d0c216f7-ce43-4852-9788-e1f5e5705ec4" (UID: "d0c216f7-ce43-4852-9788-e1f5e5705ec4"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.295139 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0c216f7-ce43-4852-9788-e1f5e5705ec4-kube-api-access-sjznr" (OuterVolumeSpecName: "kube-api-access-sjznr") pod "d0c216f7-ce43-4852-9788-e1f5e5705ec4" (UID: "d0c216f7-ce43-4852-9788-e1f5e5705ec4"). InnerVolumeSpecName "kube-api-access-sjznr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.317553 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0c216f7-ce43-4852-9788-e1f5e5705ec4-inventory" (OuterVolumeSpecName: "inventory") pod "d0c216f7-ce43-4852-9788-e1f5e5705ec4" (UID: "d0c216f7-ce43-4852-9788-e1f5e5705ec4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.334268 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0c216f7-ce43-4852-9788-e1f5e5705ec4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d0c216f7-ce43-4852-9788-e1f5e5705ec4" (UID: "d0c216f7-ce43-4852-9788-e1f5e5705ec4"). 
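
When the completed job pod d0c216f7... is torn down, each volume goes through two phases: "UnmountVolume started" followed by "UnmountVolume.TearDown succeeded", and only then is it reported as "Volume detached" (just below). A sketch of that two-phase pattern, with a hypothetical Unmounter interface standing in for the kubelet's volume plugins:

package main

import "fmt"

// Unmounter is a hypothetical stand-in for the unmount half of a kubelet
// volume plugin; TearDown should be idempotent, since the reconciler retries.
type Unmounter interface {
	Name() string
	TearDown() error
}

type secretVolume struct{ name string }

func (s secretVolume) Name() string    { return s.name }
func (s secretVolume) TearDown() error { return nil } // real plugins remove the tmpfs mount

func main() {
	volumes := []Unmounter{
		secretVolume{"inventory"},
		secretVolume{"ssh-key"},
		secretVolume{"ovn-combined-ca-bundle"},
	}
	for _, v := range volumes {
		fmt.Printf("UnmountVolume started for volume %q\n", v.Name())
		if err := v.TearDown(); err != nil {
			fmt.Printf("TearDown failed for %q: %v (will retry)\n", v.Name(), err)
			continue // a failure blocks only this volume, not the others
		}
		fmt.Printf("UnmountVolume.TearDown succeeded for %q\n", v.Name())
		fmt.Printf("Volume detached for volume %q\n", v.Name()) // now safe to report detached
	}
}
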
InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.335573 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0c216f7-ce43-4852-9788-e1f5e5705ec4-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "d0c216f7-ce43-4852-9788-e1f5e5705ec4" (UID: "d0c216f7-ce43-4852-9788-e1f5e5705ec4"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.498613 4840 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d0c216f7-ce43-4852-9788-e1f5e5705ec4-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.499035 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjznr\" (UniqueName: \"kubernetes.io/projected/d0c216f7-ce43-4852-9788-e1f5e5705ec4-kube-api-access-sjznr\") on node \"crc\" DevicePath \"\"" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.499105 4840 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0c216f7-ce43-4852-9788-e1f5e5705ec4-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.499169 4840 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d0c216f7-ce43-4852-9788-e1f5e5705ec4-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.499229 4840 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/d0c216f7-ce43-4852-9788-e1f5e5705ec4-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.721214 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc" event={"ID":"d0c216f7-ce43-4852-9788-e1f5e5705ec4","Type":"ContainerDied","Data":"66d1b140fe0d877e534d373925ebdbc74ef8ccb8ac330868ea8786be3b93691a"} Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.721255 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="66d1b140fe0d877e534d373925ebdbc74ef8ccb8ac330868ea8786be3b93691a" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.721319 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-sclgc" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.918431 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp"] Dec 05 15:34:00 crc kubenswrapper[4840]: E1205 15:34:00.918826 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5efc8997-de80-4d92-bed4-b91a75d3dc61" containerName="registry-server" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.918843 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="5efc8997-de80-4d92-bed4-b91a75d3dc61" containerName="registry-server" Dec 05 15:34:00 crc kubenswrapper[4840]: E1205 15:34:00.918882 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0c216f7-ce43-4852-9788-e1f5e5705ec4" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.918890 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0c216f7-ce43-4852-9788-e1f5e5705ec4" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 15:34:00 crc kubenswrapper[4840]: E1205 15:34:00.918903 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5efc8997-de80-4d92-bed4-b91a75d3dc61" containerName="extract-utilities" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.918910 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="5efc8997-de80-4d92-bed4-b91a75d3dc61" containerName="extract-utilities" Dec 05 15:34:00 crc kubenswrapper[4840]: E1205 15:34:00.918925 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5efc8997-de80-4d92-bed4-b91a75d3dc61" containerName="extract-content" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.918931 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="5efc8997-de80-4d92-bed4-b91a75d3dc61" containerName="extract-content" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.919108 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="5efc8997-de80-4d92-bed4-b91a75d3dc61" containerName="registry-server" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.919130 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0c216f7-ce43-4852-9788-e1f5e5705ec4" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.919744 4840 util.go:30] "No sandbox for pod can be found. 
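
Before admitting the newly added neutron-metadata pod, the CPU and memory managers drop bookkeeping left behind by containers of pods that no longer exist; each stale container produces a paired "RemoveStaleState: removing container" and "Deleted CPUSet assignment" entry above. A sketch of that bookkeeping, using a plain map keyed by pod UID and container name (a simplification; the real managers keep richer, checkpointed state):

package main

import "fmt"

type key struct{ podUID, container string }

// removeStaleState drops assignments whose pod is no longer active,
// mirroring the paired log entries above.
func removeStaleState(assignments map[key]string, activePods map[string]bool) {
	for k := range assignments {
		if !activePods[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container %q of pod %s\n", k.container, k.podUID)
			delete(assignments, k)
		}
	}
}

func main() {
	assignments := map[key]string{
		{"5efc8997", "registry-server"}:                         "cpus 0-1",
		{"d0c216f7", "ovn-edpm-deployment-openstack-edpm-ipam"}: "cpus 2-3",
	}
	removeStaleState(assignments, map[string]bool{}) // neither pod is active anymore
	fmt.Println("remaining assignments:", len(assignments))
}
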
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.924561 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.924839 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.924999 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.925145 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.925297 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-6c9x2" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.926930 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 15:34:00 crc kubenswrapper[4840]: I1205 15:34:00.932150 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp"] Dec 05 15:34:01 crc kubenswrapper[4840]: I1205 15:34:01.061922 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp\" (UID: \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" Dec 05 15:34:01 crc kubenswrapper[4840]: I1205 15:34:01.062257 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp\" (UID: \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" Dec 05 15:34:01 crc kubenswrapper[4840]: I1205 15:34:01.062345 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4zwn\" (UniqueName: \"kubernetes.io/projected/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-kube-api-access-l4zwn\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp\" (UID: \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" Dec 05 15:34:01 crc kubenswrapper[4840]: I1205 15:34:01.062388 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp\" (UID: \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" Dec 05 15:34:01 crc kubenswrapper[4840]: I1205 15:34:01.062716 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp\" (UID: \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" Dec 05 15:34:01 crc kubenswrapper[4840]: I1205 15:34:01.062920 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp\" (UID: \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" Dec 05 15:34:01 crc kubenswrapper[4840]: I1205 15:34:01.164673 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp\" (UID: \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" Dec 05 15:34:01 crc kubenswrapper[4840]: I1205 15:34:01.165891 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp\" (UID: \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" Dec 05 15:34:01 crc kubenswrapper[4840]: I1205 15:34:01.166078 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4zwn\" (UniqueName: \"kubernetes.io/projected/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-kube-api-access-l4zwn\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp\" (UID: \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" Dec 05 15:34:01 crc kubenswrapper[4840]: I1205 15:34:01.166285 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp\" (UID: \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" Dec 05 15:34:01 crc kubenswrapper[4840]: I1205 15:34:01.166378 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp\" (UID: \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" Dec 05 15:34:01 crc kubenswrapper[4840]: I1205 15:34:01.166498 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp\" (UID: \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\") " 
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" Dec 05 15:34:01 crc kubenswrapper[4840]: I1205 15:34:01.172399 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp\" (UID: \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" Dec 05 15:34:01 crc kubenswrapper[4840]: I1205 15:34:01.172536 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp\" (UID: \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" Dec 05 15:34:01 crc kubenswrapper[4840]: I1205 15:34:01.172834 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp\" (UID: \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" Dec 05 15:34:01 crc kubenswrapper[4840]: I1205 15:34:01.174076 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp\" (UID: \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" Dec 05 15:34:01 crc kubenswrapper[4840]: I1205 15:34:01.179850 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp\" (UID: \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" Dec 05 15:34:01 crc kubenswrapper[4840]: I1205 15:34:01.185517 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4zwn\" (UniqueName: \"kubernetes.io/projected/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-kube-api-access-l4zwn\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp\" (UID: \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" Dec 05 15:34:01 crc kubenswrapper[4840]: I1205 15:34:01.240883 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" Dec 05 15:34:01 crc kubenswrapper[4840]: I1205 15:34:01.769735 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp"] Dec 05 15:34:02 crc kubenswrapper[4840]: I1205 15:34:02.739696 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" event={"ID":"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b","Type":"ContainerStarted","Data":"4393bd88ae0d953bc7e4077097d748954dc63666de2f7b6def934ad8517cb5df"} Dec 05 15:34:02 crc kubenswrapper[4840]: I1205 15:34:02.740516 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" event={"ID":"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b","Type":"ContainerStarted","Data":"1dbc3cb670d041720a95b5e02b1ef8bb086e2b67427a35c6253e46201a32ea5c"} Dec 05 15:34:02 crc kubenswrapper[4840]: I1205 15:34:02.762492 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" podStartSLOduration=2.238724702 podStartE2EDuration="2.762470133s" podCreationTimestamp="2025-12-05 15:34:00 +0000 UTC" firstStartedPulling="2025-12-05 15:34:01.776379379 +0000 UTC m=+2120.117441993" lastFinishedPulling="2025-12-05 15:34:02.3001248 +0000 UTC m=+2120.641187424" observedRunningTime="2025-12-05 15:34:02.756545205 +0000 UTC m=+2121.097607819" watchObservedRunningTime="2025-12-05 15:34:02.762470133 +0000 UTC m=+2121.103532747" Dec 05 15:34:47 crc kubenswrapper[4840]: I1205 15:34:47.174590 4840 generic.go:334] "Generic (PLEG): container finished" podID="e9a9f7dd-12db-447d-a9d9-f279b5f72f5b" containerID="4393bd88ae0d953bc7e4077097d748954dc63666de2f7b6def934ad8517cb5df" exitCode=0 Dec 05 15:34:47 crc kubenswrapper[4840]: I1205 15:34:47.174710 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" event={"ID":"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b","Type":"ContainerDied","Data":"4393bd88ae0d953bc7e4077097d748954dc63666de2f7b6def934ad8517cb5df"} Dec 05 15:34:48 crc kubenswrapper[4840]: I1205 15:34:48.619842 4840 util.go:48] "No ready sandbox for pod can be found. 
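
The two figures in the "Observed pod startup duration" entry above differ by exactly the image-pull window: 2.762470133s end-to-end minus the pull time (15:34:02.3001248 less 15:34:01.776379379, about 0.5237s) gives the 2.238724702s SLO duration, i.e. image pulling is excluded from the SLO figure. A quick check of that arithmetic in Go, parsing the timestamps as printed in the log:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Layout matching Go's default time.Time formatting used in the log.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	parse := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}
	// Timestamps from the "Observed pod startup duration" entry above.
	firstStartedPulling := parse("2025-12-05 15:34:01.776379379 +0000 UTC")
	lastFinishedPulling := parse("2025-12-05 15:34:02.3001248 +0000 UTC")
	e2e := 2762470133 * time.Nanosecond // podStartE2EDuration="2.762470133s"

	pull := lastFinishedPulling.Sub(firstStartedPulling)
	fmt.Println("pull window:", pull)      // ~523.745421ms
	fmt.Println("SLO duration:", e2e-pull) // ~2.238724712s; matches podStartSLOduration=2.238724702 within clock rounding
}
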
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" Dec 05 15:34:48 crc kubenswrapper[4840]: I1205 15:34:48.688502 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-nova-metadata-neutron-config-0\") pod \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\" (UID: \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\") " Dec 05 15:34:48 crc kubenswrapper[4840]: I1205 15:34:48.688660 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l4zwn\" (UniqueName: \"kubernetes.io/projected/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-kube-api-access-l4zwn\") pod \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\" (UID: \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\") " Dec 05 15:34:48 crc kubenswrapper[4840]: I1205 15:34:48.688853 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-neutron-ovn-metadata-agent-neutron-config-0\") pod \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\" (UID: \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\") " Dec 05 15:34:48 crc kubenswrapper[4840]: I1205 15:34:48.688917 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-ssh-key\") pod \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\" (UID: \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\") " Dec 05 15:34:48 crc kubenswrapper[4840]: I1205 15:34:48.688977 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-neutron-metadata-combined-ca-bundle\") pod \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\" (UID: \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\") " Dec 05 15:34:48 crc kubenswrapper[4840]: I1205 15:34:48.689097 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-inventory\") pod \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\" (UID: \"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b\") " Dec 05 15:34:48 crc kubenswrapper[4840]: I1205 15:34:48.696141 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-kube-api-access-l4zwn" (OuterVolumeSpecName: "kube-api-access-l4zwn") pod "e9a9f7dd-12db-447d-a9d9-f279b5f72f5b" (UID: "e9a9f7dd-12db-447d-a9d9-f279b5f72f5b"). InnerVolumeSpecName "kube-api-access-l4zwn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:34:48 crc kubenswrapper[4840]: I1205 15:34:48.707798 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "e9a9f7dd-12db-447d-a9d9-f279b5f72f5b" (UID: "e9a9f7dd-12db-447d-a9d9-f279b5f72f5b"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:34:48 crc kubenswrapper[4840]: I1205 15:34:48.718040 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-inventory" (OuterVolumeSpecName: "inventory") pod "e9a9f7dd-12db-447d-a9d9-f279b5f72f5b" (UID: "e9a9f7dd-12db-447d-a9d9-f279b5f72f5b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:34:48 crc kubenswrapper[4840]: I1205 15:34:48.721612 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e9a9f7dd-12db-447d-a9d9-f279b5f72f5b" (UID: "e9a9f7dd-12db-447d-a9d9-f279b5f72f5b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:34:48 crc kubenswrapper[4840]: I1205 15:34:48.724283 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "e9a9f7dd-12db-447d-a9d9-f279b5f72f5b" (UID: "e9a9f7dd-12db-447d-a9d9-f279b5f72f5b"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:34:48 crc kubenswrapper[4840]: I1205 15:34:48.726647 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "e9a9f7dd-12db-447d-a9d9-f279b5f72f5b" (UID: "e9a9f7dd-12db-447d-a9d9-f279b5f72f5b"). InnerVolumeSpecName "nova-metadata-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:34:48 crc kubenswrapper[4840]: I1205 15:34:48.790831 4840 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 15:34:48 crc kubenswrapper[4840]: I1205 15:34:48.790889 4840 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 15:34:48 crc kubenswrapper[4840]: I1205 15:34:48.790907 4840 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:34:48 crc kubenswrapper[4840]: I1205 15:34:48.790921 4840 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 15:34:48 crc kubenswrapper[4840]: I1205 15:34:48.790934 4840 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 15:34:48 crc kubenswrapper[4840]: I1205 15:34:48.790944 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l4zwn\" (UniqueName: \"kubernetes.io/projected/e9a9f7dd-12db-447d-a9d9-f279b5f72f5b-kube-api-access-l4zwn\") on node \"crc\" DevicePath \"\"" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.194701 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" event={"ID":"e9a9f7dd-12db-447d-a9d9-f279b5f72f5b","Type":"ContainerDied","Data":"1dbc3cb670d041720a95b5e02b1ef8bb086e2b67427a35c6253e46201a32ea5c"} Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.194767 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1dbc3cb670d041720a95b5e02b1ef8bb086e2b67427a35c6253e46201a32ea5c" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.194819 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.290239 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk"] Dec 05 15:34:49 crc kubenswrapper[4840]: E1205 15:34:49.291128 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9a9f7dd-12db-447d-a9d9-f279b5f72f5b" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.291156 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9a9f7dd-12db-447d-a9d9-f279b5f72f5b" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.291414 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9a9f7dd-12db-447d-a9d9-f279b5f72f5b" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.292262 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.300795 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.300975 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.300994 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.301210 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.301422 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-6c9x2" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.302190 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk"] Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.404521 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/453e239f-2acb-42cb-a617-35975fb5437a-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk\" (UID: \"453e239f-2acb-42cb-a617-35975fb5437a\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.404626 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/453e239f-2acb-42cb-a617-35975fb5437a-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk\" (UID: \"453e239f-2acb-42cb-a617-35975fb5437a\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.404697 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kp4x7\" (UniqueName: \"kubernetes.io/projected/453e239f-2acb-42cb-a617-35975fb5437a-kube-api-access-kp4x7\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk\" (UID: \"453e239f-2acb-42cb-a617-35975fb5437a\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.404802 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/453e239f-2acb-42cb-a617-35975fb5437a-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk\" (UID: \"453e239f-2acb-42cb-a617-35975fb5437a\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.404886 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/453e239f-2acb-42cb-a617-35975fb5437a-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk\" (UID: \"453e239f-2acb-42cb-a617-35975fb5437a\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.506452 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/453e239f-2acb-42cb-a617-35975fb5437a-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk\" (UID: \"453e239f-2acb-42cb-a617-35975fb5437a\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.506527 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/453e239f-2acb-42cb-a617-35975fb5437a-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk\" (UID: \"453e239f-2acb-42cb-a617-35975fb5437a\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.506581 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kp4x7\" (UniqueName: \"kubernetes.io/projected/453e239f-2acb-42cb-a617-35975fb5437a-kube-api-access-kp4x7\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk\" (UID: \"453e239f-2acb-42cb-a617-35975fb5437a\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.506722 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/453e239f-2acb-42cb-a617-35975fb5437a-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk\" (UID: \"453e239f-2acb-42cb-a617-35975fb5437a\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.506768 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/453e239f-2acb-42cb-a617-35975fb5437a-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk\" (UID: \"453e239f-2acb-42cb-a617-35975fb5437a\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.511050 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/453e239f-2acb-42cb-a617-35975fb5437a-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk\" (UID: \"453e239f-2acb-42cb-a617-35975fb5437a\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.511414 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/453e239f-2acb-42cb-a617-35975fb5437a-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk\" (UID: \"453e239f-2acb-42cb-a617-35975fb5437a\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.511704 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/453e239f-2acb-42cb-a617-35975fb5437a-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk\" (UID: \"453e239f-2acb-42cb-a617-35975fb5437a\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.512987 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/453e239f-2acb-42cb-a617-35975fb5437a-libvirt-combined-ca-bundle\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk\" (UID: \"453e239f-2acb-42cb-a617-35975fb5437a\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.524372 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kp4x7\" (UniqueName: \"kubernetes.io/projected/453e239f-2acb-42cb-a617-35975fb5437a-kube-api-access-kp4x7\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk\" (UID: \"453e239f-2acb-42cb-a617-35975fb5437a\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk" Dec 05 15:34:49 crc kubenswrapper[4840]: I1205 15:34:49.617962 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk" Dec 05 15:34:50 crc kubenswrapper[4840]: I1205 15:34:50.133921 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk"] Dec 05 15:34:50 crc kubenswrapper[4840]: W1205 15:34:50.137199 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod453e239f_2acb_42cb_a617_35975fb5437a.slice/crio-0b5d08d92cb2fb542c8fb9dd523fd33838cb58e26d09d4108b73b317e7670aab WatchSource:0}: Error finding container 0b5d08d92cb2fb542c8fb9dd523fd33838cb58e26d09d4108b73b317e7670aab: Status 404 returned error can't find the container with id 0b5d08d92cb2fb542c8fb9dd523fd33838cb58e26d09d4108b73b317e7670aab Dec 05 15:34:50 crc kubenswrapper[4840]: I1205 15:34:50.202975 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk" event={"ID":"453e239f-2acb-42cb-a617-35975fb5437a","Type":"ContainerStarted","Data":"0b5d08d92cb2fb542c8fb9dd523fd33838cb58e26d09d4108b73b317e7670aab"} Dec 05 15:34:51 crc kubenswrapper[4840]: I1205 15:34:51.214020 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk" event={"ID":"453e239f-2acb-42cb-a617-35975fb5437a","Type":"ContainerStarted","Data":"210e1141c82447893d9178675b116bf3749dd8d1e9aa40cb3c6a587138d495ac"} Dec 05 15:34:51 crc kubenswrapper[4840]: I1205 15:34:51.248184 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk" podStartSLOduration=1.847670827 podStartE2EDuration="2.248133811s" podCreationTimestamp="2025-12-05 15:34:49 +0000 UTC" firstStartedPulling="2025-12-05 15:34:50.139574421 +0000 UTC m=+2168.480637035" lastFinishedPulling="2025-12-05 15:34:50.540037405 +0000 UTC m=+2168.881100019" observedRunningTime="2025-12-05 15:34:51.232067825 +0000 UTC m=+2169.573130449" watchObservedRunningTime="2025-12-05 15:34:51.248133811 +0000 UTC m=+2169.589196445" Dec 05 15:35:19 crc kubenswrapper[4840]: I1205 15:35:19.471455 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:35:19 crc kubenswrapper[4840]: I1205 15:35:19.471942 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial 
tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:35:32 crc kubenswrapper[4840]: I1205 15:35:32.873526 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-f9l98"] Dec 05 15:35:32 crc kubenswrapper[4840]: I1205 15:35:32.877004 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f9l98" Dec 05 15:35:32 crc kubenswrapper[4840]: I1205 15:35:32.890619 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f9l98"] Dec 05 15:35:33 crc kubenswrapper[4840]: I1205 15:35:33.002987 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5x9q\" (UniqueName: \"kubernetes.io/projected/ccdb6da2-a3e0-430b-bd92-26fcd4425773-kube-api-access-w5x9q\") pod \"community-operators-f9l98\" (UID: \"ccdb6da2-a3e0-430b-bd92-26fcd4425773\") " pod="openshift-marketplace/community-operators-f9l98" Dec 05 15:35:33 crc kubenswrapper[4840]: I1205 15:35:33.003759 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccdb6da2-a3e0-430b-bd92-26fcd4425773-catalog-content\") pod \"community-operators-f9l98\" (UID: \"ccdb6da2-a3e0-430b-bd92-26fcd4425773\") " pod="openshift-marketplace/community-operators-f9l98" Dec 05 15:35:33 crc kubenswrapper[4840]: I1205 15:35:33.003826 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccdb6da2-a3e0-430b-bd92-26fcd4425773-utilities\") pod \"community-operators-f9l98\" (UID: \"ccdb6da2-a3e0-430b-bd92-26fcd4425773\") " pod="openshift-marketplace/community-operators-f9l98" Dec 05 15:35:33 crc kubenswrapper[4840]: I1205 15:35:33.105736 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccdb6da2-a3e0-430b-bd92-26fcd4425773-catalog-content\") pod \"community-operators-f9l98\" (UID: \"ccdb6da2-a3e0-430b-bd92-26fcd4425773\") " pod="openshift-marketplace/community-operators-f9l98" Dec 05 15:35:33 crc kubenswrapper[4840]: I1205 15:35:33.105791 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccdb6da2-a3e0-430b-bd92-26fcd4425773-utilities\") pod \"community-operators-f9l98\" (UID: \"ccdb6da2-a3e0-430b-bd92-26fcd4425773\") " pod="openshift-marketplace/community-operators-f9l98" Dec 05 15:35:33 crc kubenswrapper[4840]: I1205 15:35:33.105956 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5x9q\" (UniqueName: \"kubernetes.io/projected/ccdb6da2-a3e0-430b-bd92-26fcd4425773-kube-api-access-w5x9q\") pod \"community-operators-f9l98\" (UID: \"ccdb6da2-a3e0-430b-bd92-26fcd4425773\") " pod="openshift-marketplace/community-operators-f9l98" Dec 05 15:35:33 crc kubenswrapper[4840]: I1205 15:35:33.106747 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccdb6da2-a3e0-430b-bd92-26fcd4425773-utilities\") pod \"community-operators-f9l98\" (UID: \"ccdb6da2-a3e0-430b-bd92-26fcd4425773\") " pod="openshift-marketplace/community-operators-f9l98" Dec 05 15:35:33 crc kubenswrapper[4840]: I1205 15:35:33.106819 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccdb6da2-a3e0-430b-bd92-26fcd4425773-catalog-content\") pod \"community-operators-f9l98\" (UID: \"ccdb6da2-a3e0-430b-bd92-26fcd4425773\") " pod="openshift-marketplace/community-operators-f9l98" Dec 05 15:35:33 crc kubenswrapper[4840]: I1205 15:35:33.128690 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5x9q\" (UniqueName: \"kubernetes.io/projected/ccdb6da2-a3e0-430b-bd92-26fcd4425773-kube-api-access-w5x9q\") pod \"community-operators-f9l98\" (UID: \"ccdb6da2-a3e0-430b-bd92-26fcd4425773\") " pod="openshift-marketplace/community-operators-f9l98" Dec 05 15:35:33 crc kubenswrapper[4840]: I1205 15:35:33.360560 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f9l98" Dec 05 15:35:34 crc kubenswrapper[4840]: I1205 15:35:34.111069 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f9l98"] Dec 05 15:35:35 crc kubenswrapper[4840]: I1205 15:35:35.107126 4840 generic.go:334] "Generic (PLEG): container finished" podID="ccdb6da2-a3e0-430b-bd92-26fcd4425773" containerID="955e0d11294aa3adb4e6c126a8c0f5e565364cdff91240fd7ecd4b6f34554d69" exitCode=0 Dec 05 15:35:35 crc kubenswrapper[4840]: I1205 15:35:35.107426 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f9l98" event={"ID":"ccdb6da2-a3e0-430b-bd92-26fcd4425773","Type":"ContainerDied","Data":"955e0d11294aa3adb4e6c126a8c0f5e565364cdff91240fd7ecd4b6f34554d69"} Dec 05 15:35:35 crc kubenswrapper[4840]: I1205 15:35:35.107458 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f9l98" event={"ID":"ccdb6da2-a3e0-430b-bd92-26fcd4425773","Type":"ContainerStarted","Data":"fffaf75c47d0769f75f84de8207105376a53f3ea693441f54710e871e88f8d19"} Dec 05 15:35:35 crc kubenswrapper[4840]: I1205 15:35:35.474010 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hqdsf"] Dec 05 15:35:35 crc kubenswrapper[4840]: I1205 15:35:35.476252 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hqdsf" Dec 05 15:35:35 crc kubenswrapper[4840]: I1205 15:35:35.484519 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hqdsf"] Dec 05 15:35:35 crc kubenswrapper[4840]: I1205 15:35:35.648650 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkd8w\" (UniqueName: \"kubernetes.io/projected/c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a-kube-api-access-mkd8w\") pod \"certified-operators-hqdsf\" (UID: \"c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a\") " pod="openshift-marketplace/certified-operators-hqdsf" Dec 05 15:35:35 crc kubenswrapper[4840]: I1205 15:35:35.648726 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a-utilities\") pod \"certified-operators-hqdsf\" (UID: \"c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a\") " pod="openshift-marketplace/certified-operators-hqdsf" Dec 05 15:35:35 crc kubenswrapper[4840]: I1205 15:35:35.649644 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a-catalog-content\") pod \"certified-operators-hqdsf\" (UID: \"c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a\") " pod="openshift-marketplace/certified-operators-hqdsf" Dec 05 15:35:35 crc kubenswrapper[4840]: I1205 15:35:35.751140 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a-utilities\") pod \"certified-operators-hqdsf\" (UID: \"c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a\") " pod="openshift-marketplace/certified-operators-hqdsf" Dec 05 15:35:35 crc kubenswrapper[4840]: I1205 15:35:35.751496 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a-catalog-content\") pod \"certified-operators-hqdsf\" (UID: \"c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a\") " pod="openshift-marketplace/certified-operators-hqdsf" Dec 05 15:35:35 crc kubenswrapper[4840]: I1205 15:35:35.751668 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkd8w\" (UniqueName: \"kubernetes.io/projected/c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a-kube-api-access-mkd8w\") pod \"certified-operators-hqdsf\" (UID: \"c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a\") " pod="openshift-marketplace/certified-operators-hqdsf" Dec 05 15:35:35 crc kubenswrapper[4840]: I1205 15:35:35.751715 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a-utilities\") pod \"certified-operators-hqdsf\" (UID: \"c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a\") " pod="openshift-marketplace/certified-operators-hqdsf" Dec 05 15:35:35 crc kubenswrapper[4840]: I1205 15:35:35.752080 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a-catalog-content\") pod \"certified-operators-hqdsf\" (UID: \"c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a\") " pod="openshift-marketplace/certified-operators-hqdsf" Dec 05 15:35:35 crc kubenswrapper[4840]: I1205 15:35:35.777188 4840 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-mkd8w\" (UniqueName: \"kubernetes.io/projected/c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a-kube-api-access-mkd8w\") pod \"certified-operators-hqdsf\" (UID: \"c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a\") " pod="openshift-marketplace/certified-operators-hqdsf" Dec 05 15:35:35 crc kubenswrapper[4840]: I1205 15:35:35.794307 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hqdsf" Dec 05 15:35:36 crc kubenswrapper[4840]: I1205 15:35:36.539462 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hqdsf"] Dec 05 15:35:36 crc kubenswrapper[4840]: W1205 15:35:36.552681 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc71a6d3c_ec2f_42f8_9b0d_ab5765412e1a.slice/crio-9e30401e961422915eec2bb095a77cf9493d7ce6e35733d23e5ef43ef6ca39b0 WatchSource:0}: Error finding container 9e30401e961422915eec2bb095a77cf9493d7ce6e35733d23e5ef43ef6ca39b0: Status 404 returned error can't find the container with id 9e30401e961422915eec2bb095a77cf9493d7ce6e35733d23e5ef43ef6ca39b0 Dec 05 15:35:37 crc kubenswrapper[4840]: I1205 15:35:37.188990 4840 generic.go:334] "Generic (PLEG): container finished" podID="c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a" containerID="2ebb68c90409f61177ae047c4c8ac17ecb222ec7f0d57656ed001c7a42e7dde8" exitCode=0 Dec 05 15:35:37 crc kubenswrapper[4840]: I1205 15:35:37.189062 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hqdsf" event={"ID":"c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a","Type":"ContainerDied","Data":"2ebb68c90409f61177ae047c4c8ac17ecb222ec7f0d57656ed001c7a42e7dde8"} Dec 05 15:35:37 crc kubenswrapper[4840]: I1205 15:35:37.189399 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hqdsf" event={"ID":"c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a","Type":"ContainerStarted","Data":"9e30401e961422915eec2bb095a77cf9493d7ce6e35733d23e5ef43ef6ca39b0"} Dec 05 15:35:37 crc kubenswrapper[4840]: I1205 15:35:37.192140 4840 generic.go:334] "Generic (PLEG): container finished" podID="ccdb6da2-a3e0-430b-bd92-26fcd4425773" containerID="20cfaeba6eac19c04b4c73fca7dd00e4b45f4f9fbb8878e1afaca4140789caf0" exitCode=0 Dec 05 15:35:37 crc kubenswrapper[4840]: I1205 15:35:37.192189 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f9l98" event={"ID":"ccdb6da2-a3e0-430b-bd92-26fcd4425773","Type":"ContainerDied","Data":"20cfaeba6eac19c04b4c73fca7dd00e4b45f4f9fbb8878e1afaca4140789caf0"} Dec 05 15:35:38 crc kubenswrapper[4840]: I1205 15:35:38.208117 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f9l98" event={"ID":"ccdb6da2-a3e0-430b-bd92-26fcd4425773","Type":"ContainerStarted","Data":"57c4518caeda71607a8150b86904af39ff10ee2c7ca21935c2e97d2389d61470"} Dec 05 15:35:38 crc kubenswrapper[4840]: I1205 15:35:38.215427 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hqdsf" event={"ID":"c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a","Type":"ContainerStarted","Data":"0f0ad303f808653492f9defd66095ad8a2fe9618f9a992060769df4c85ac31a1"} Dec 05 15:35:38 crc kubenswrapper[4840]: I1205 15:35:38.232652 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-f9l98" 
podStartSLOduration=3.624416753 podStartE2EDuration="6.23263236s" podCreationTimestamp="2025-12-05 15:35:32 +0000 UTC" firstStartedPulling="2025-12-05 15:35:35.110430741 +0000 UTC m=+2213.451493355" lastFinishedPulling="2025-12-05 15:35:37.718646338 +0000 UTC m=+2216.059708962" observedRunningTime="2025-12-05 15:35:38.229035088 +0000 UTC m=+2216.570097712" watchObservedRunningTime="2025-12-05 15:35:38.23263236 +0000 UTC m=+2216.573694984" Dec 05 15:35:39 crc kubenswrapper[4840]: I1205 15:35:39.232246 4840 generic.go:334] "Generic (PLEG): container finished" podID="c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a" containerID="0f0ad303f808653492f9defd66095ad8a2fe9618f9a992060769df4c85ac31a1" exitCode=0 Dec 05 15:35:39 crc kubenswrapper[4840]: I1205 15:35:39.232276 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hqdsf" event={"ID":"c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a","Type":"ContainerDied","Data":"0f0ad303f808653492f9defd66095ad8a2fe9618f9a992060769df4c85ac31a1"} Dec 05 15:35:41 crc kubenswrapper[4840]: I1205 15:35:41.257760 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hqdsf" event={"ID":"c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a","Type":"ContainerStarted","Data":"7a6d0b8de4482b8b50fcea4b5707c5aae652f3044d01dfae293258001148c9e5"} Dec 05 15:35:41 crc kubenswrapper[4840]: I1205 15:35:41.284225 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hqdsf" podStartSLOduration=3.182051765 podStartE2EDuration="6.284194946s" podCreationTimestamp="2025-12-05 15:35:35 +0000 UTC" firstStartedPulling="2025-12-05 15:35:37.190759781 +0000 UTC m=+2215.531822405" lastFinishedPulling="2025-12-05 15:35:40.292902932 +0000 UTC m=+2218.633965586" observedRunningTime="2025-12-05 15:35:41.277712862 +0000 UTC m=+2219.618775486" watchObservedRunningTime="2025-12-05 15:35:41.284194946 +0000 UTC m=+2219.625257590" Dec 05 15:35:43 crc kubenswrapper[4840]: I1205 15:35:43.361639 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-f9l98" Dec 05 15:35:43 crc kubenswrapper[4840]: I1205 15:35:43.362023 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-f9l98" Dec 05 15:35:43 crc kubenswrapper[4840]: I1205 15:35:43.406915 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-f9l98" Dec 05 15:35:44 crc kubenswrapper[4840]: I1205 15:35:44.426797 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-f9l98" Dec 05 15:35:45 crc kubenswrapper[4840]: I1205 15:35:45.469499 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f9l98"] Dec 05 15:35:45 crc kubenswrapper[4840]: I1205 15:35:45.795519 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hqdsf" Dec 05 15:35:45 crc kubenswrapper[4840]: I1205 15:35:45.795580 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hqdsf" Dec 05 15:35:45 crc kubenswrapper[4840]: I1205 15:35:45.847441 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hqdsf" Dec 05 15:35:46 crc kubenswrapper[4840]: I1205 15:35:46.382079 4840 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-f9l98" podUID="ccdb6da2-a3e0-430b-bd92-26fcd4425773" containerName="registry-server" containerID="cri-o://57c4518caeda71607a8150b86904af39ff10ee2c7ca21935c2e97d2389d61470" gracePeriod=2 Dec 05 15:35:46 crc kubenswrapper[4840]: I1205 15:35:46.465015 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hqdsf" Dec 05 15:35:46 crc kubenswrapper[4840]: I1205 15:35:46.837783 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f9l98" Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.033073 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccdb6da2-a3e0-430b-bd92-26fcd4425773-utilities\") pod \"ccdb6da2-a3e0-430b-bd92-26fcd4425773\" (UID: \"ccdb6da2-a3e0-430b-bd92-26fcd4425773\") " Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.033160 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccdb6da2-a3e0-430b-bd92-26fcd4425773-catalog-content\") pod \"ccdb6da2-a3e0-430b-bd92-26fcd4425773\" (UID: \"ccdb6da2-a3e0-430b-bd92-26fcd4425773\") " Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.033192 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w5x9q\" (UniqueName: \"kubernetes.io/projected/ccdb6da2-a3e0-430b-bd92-26fcd4425773-kube-api-access-w5x9q\") pod \"ccdb6da2-a3e0-430b-bd92-26fcd4425773\" (UID: \"ccdb6da2-a3e0-430b-bd92-26fcd4425773\") " Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.033674 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccdb6da2-a3e0-430b-bd92-26fcd4425773-utilities" (OuterVolumeSpecName: "utilities") pod "ccdb6da2-a3e0-430b-bd92-26fcd4425773" (UID: "ccdb6da2-a3e0-430b-bd92-26fcd4425773"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.040514 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccdb6da2-a3e0-430b-bd92-26fcd4425773-kube-api-access-w5x9q" (OuterVolumeSpecName: "kube-api-access-w5x9q") pod "ccdb6da2-a3e0-430b-bd92-26fcd4425773" (UID: "ccdb6da2-a3e0-430b-bd92-26fcd4425773"). InnerVolumeSpecName "kube-api-access-w5x9q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.097504 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccdb6da2-a3e0-430b-bd92-26fcd4425773-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ccdb6da2-a3e0-430b-bd92-26fcd4425773" (UID: "ccdb6da2-a3e0-430b-bd92-26fcd4425773"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.135614 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ccdb6da2-a3e0-430b-bd92-26fcd4425773-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.135657 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ccdb6da2-a3e0-430b-bd92-26fcd4425773-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.135674 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w5x9q\" (UniqueName: \"kubernetes.io/projected/ccdb6da2-a3e0-430b-bd92-26fcd4425773-kube-api-access-w5x9q\") on node \"crc\" DevicePath \"\"" Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.392956 4840 generic.go:334] "Generic (PLEG): container finished" podID="ccdb6da2-a3e0-430b-bd92-26fcd4425773" containerID="57c4518caeda71607a8150b86904af39ff10ee2c7ca21935c2e97d2389d61470" exitCode=0 Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.393774 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f9l98" Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.394022 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f9l98" event={"ID":"ccdb6da2-a3e0-430b-bd92-26fcd4425773","Type":"ContainerDied","Data":"57c4518caeda71607a8150b86904af39ff10ee2c7ca21935c2e97d2389d61470"} Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.394057 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f9l98" event={"ID":"ccdb6da2-a3e0-430b-bd92-26fcd4425773","Type":"ContainerDied","Data":"fffaf75c47d0769f75f84de8207105376a53f3ea693441f54710e871e88f8d19"} Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.394082 4840 scope.go:117] "RemoveContainer" containerID="57c4518caeda71607a8150b86904af39ff10ee2c7ca21935c2e97d2389d61470" Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.454026 4840 scope.go:117] "RemoveContainer" containerID="20cfaeba6eac19c04b4c73fca7dd00e4b45f4f9fbb8878e1afaca4140789caf0" Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.454852 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f9l98"] Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.469977 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-f9l98"] Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.482162 4840 scope.go:117] "RemoveContainer" containerID="955e0d11294aa3adb4e6c126a8c0f5e565364cdff91240fd7ecd4b6f34554d69" Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.540095 4840 scope.go:117] "RemoveContainer" containerID="57c4518caeda71607a8150b86904af39ff10ee2c7ca21935c2e97d2389d61470" Dec 05 15:35:47 crc kubenswrapper[4840]: E1205 15:35:47.542790 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57c4518caeda71607a8150b86904af39ff10ee2c7ca21935c2e97d2389d61470\": container with ID starting with 57c4518caeda71607a8150b86904af39ff10ee2c7ca21935c2e97d2389d61470 not found: ID does not exist" containerID="57c4518caeda71607a8150b86904af39ff10ee2c7ca21935c2e97d2389d61470" Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.542841 
Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.542841 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57c4518caeda71607a8150b86904af39ff10ee2c7ca21935c2e97d2389d61470"} err="failed to get container status \"57c4518caeda71607a8150b86904af39ff10ee2c7ca21935c2e97d2389d61470\": rpc error: code = NotFound desc = could not find container \"57c4518caeda71607a8150b86904af39ff10ee2c7ca21935c2e97d2389d61470\": container with ID starting with 57c4518caeda71607a8150b86904af39ff10ee2c7ca21935c2e97d2389d61470 not found: ID does not exist"
Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.542897 4840 scope.go:117] "RemoveContainer" containerID="20cfaeba6eac19c04b4c73fca7dd00e4b45f4f9fbb8878e1afaca4140789caf0"
Dec 05 15:35:47 crc kubenswrapper[4840]: E1205 15:35:47.543574 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20cfaeba6eac19c04b4c73fca7dd00e4b45f4f9fbb8878e1afaca4140789caf0\": container with ID starting with 20cfaeba6eac19c04b4c73fca7dd00e4b45f4f9fbb8878e1afaca4140789caf0 not found: ID does not exist" containerID="20cfaeba6eac19c04b4c73fca7dd00e4b45f4f9fbb8878e1afaca4140789caf0"
Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.543663 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20cfaeba6eac19c04b4c73fca7dd00e4b45f4f9fbb8878e1afaca4140789caf0"} err="failed to get container status \"20cfaeba6eac19c04b4c73fca7dd00e4b45f4f9fbb8878e1afaca4140789caf0\": rpc error: code = NotFound desc = could not find container \"20cfaeba6eac19c04b4c73fca7dd00e4b45f4f9fbb8878e1afaca4140789caf0\": container with ID starting with 20cfaeba6eac19c04b4c73fca7dd00e4b45f4f9fbb8878e1afaca4140789caf0 not found: ID does not exist"
Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.543713 4840 scope.go:117] "RemoveContainer" containerID="955e0d11294aa3adb4e6c126a8c0f5e565364cdff91240fd7ecd4b6f34554d69"
Dec 05 15:35:47 crc kubenswrapper[4840]: E1205 15:35:47.544303 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"955e0d11294aa3adb4e6c126a8c0f5e565364cdff91240fd7ecd4b6f34554d69\": container with ID starting with 955e0d11294aa3adb4e6c126a8c0f5e565364cdff91240fd7ecd4b6f34554d69 not found: ID does not exist" containerID="955e0d11294aa3adb4e6c126a8c0f5e565364cdff91240fd7ecd4b6f34554d69"
Dec 05 15:35:47 crc kubenswrapper[4840]: I1205 15:35:47.544368 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"955e0d11294aa3adb4e6c126a8c0f5e565364cdff91240fd7ecd4b6f34554d69"} err="failed to get container status \"955e0d11294aa3adb4e6c126a8c0f5e565364cdff91240fd7ecd4b6f34554d69\": rpc error: code = NotFound desc = could not find container \"955e0d11294aa3adb4e6c126a8c0f5e565364cdff91240fd7ecd4b6f34554d69\": container with ID starting with 955e0d11294aa3adb4e6c126a8c0f5e565364cdff91240fd7ecd4b6f34554d69 not found: ID does not exist"
Dec 05 15:35:48 crc kubenswrapper[4840]: I1205 15:35:48.078803 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ccdb6da2-a3e0-430b-bd92-26fcd4425773" path="/var/lib/kubelet/pods/ccdb6da2-a3e0-430b-bd92-26fcd4425773/volumes"
Dec 05 15:35:48 crc kubenswrapper[4840]: I1205 15:35:48.270516 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hqdsf"]
Dec 05 15:35:48 crc kubenswrapper[4840]: I1205 15:35:48.404894 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hqdsf" podUID="c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a" containerName="registry-server" containerID="cri-o://7a6d0b8de4482b8b50fcea4b5707c5aae652f3044d01dfae293258001148c9e5" gracePeriod=2
Dec 05 15:35:48 crc kubenswrapper[4840]: I1205 15:35:48.852597 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hqdsf"
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.009430 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a-catalog-content\") pod \"c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a\" (UID: \"c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a\") "
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.009630 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a-utilities\") pod \"c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a\" (UID: \"c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a\") "
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.009792 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mkd8w\" (UniqueName: \"kubernetes.io/projected/c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a-kube-api-access-mkd8w\") pod \"c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a\" (UID: \"c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a\") "
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.010928 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a-utilities" (OuterVolumeSpecName: "utilities") pod "c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a" (UID: "c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.015241 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a-kube-api-access-mkd8w" (OuterVolumeSpecName: "kube-api-access-mkd8w") pod "c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a" (UID: "c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a"). InnerVolumeSpecName "kube-api-access-mkd8w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.059995 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a" (UID: "c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.112584 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mkd8w\" (UniqueName: \"kubernetes.io/projected/c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a-kube-api-access-mkd8w\") on node \"crc\" DevicePath \"\""
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.112788 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.112815 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.418248 4840 generic.go:334] "Generic (PLEG): container finished" podID="c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a" containerID="7a6d0b8de4482b8b50fcea4b5707c5aae652f3044d01dfae293258001148c9e5" exitCode=0
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.418294 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hqdsf" event={"ID":"c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a","Type":"ContainerDied","Data":"7a6d0b8de4482b8b50fcea4b5707c5aae652f3044d01dfae293258001148c9e5"}
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.418328 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hqdsf"
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.418346 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hqdsf" event={"ID":"c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a","Type":"ContainerDied","Data":"9e30401e961422915eec2bb095a77cf9493d7ce6e35733d23e5ef43ef6ca39b0"}
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.418388 4840 scope.go:117] "RemoveContainer" containerID="7a6d0b8de4482b8b50fcea4b5707c5aae652f3044d01dfae293258001148c9e5"
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.439658 4840 scope.go:117] "RemoveContainer" containerID="0f0ad303f808653492f9defd66095ad8a2fe9618f9a992060769df4c85ac31a1"
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.457518 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hqdsf"]
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.465993 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hqdsf"]
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.472401 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.472450 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.483578 4840 scope.go:117] "RemoveContainer" containerID="2ebb68c90409f61177ae047c4c8ac17ecb222ec7f0d57656ed001c7a42e7dde8"
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.515277 4840 scope.go:117] "RemoveContainer" containerID="7a6d0b8de4482b8b50fcea4b5707c5aae652f3044d01dfae293258001148c9e5"
Dec 05 15:35:49 crc kubenswrapper[4840]: E1205 15:35:49.516297 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a6d0b8de4482b8b50fcea4b5707c5aae652f3044d01dfae293258001148c9e5\": container with ID starting with 7a6d0b8de4482b8b50fcea4b5707c5aae652f3044d01dfae293258001148c9e5 not found: ID does not exist" containerID="7a6d0b8de4482b8b50fcea4b5707c5aae652f3044d01dfae293258001148c9e5"
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.516328 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a6d0b8de4482b8b50fcea4b5707c5aae652f3044d01dfae293258001148c9e5"} err="failed to get container status \"7a6d0b8de4482b8b50fcea4b5707c5aae652f3044d01dfae293258001148c9e5\": rpc error: code = NotFound desc = could not find container \"7a6d0b8de4482b8b50fcea4b5707c5aae652f3044d01dfae293258001148c9e5\": container with ID starting with 7a6d0b8de4482b8b50fcea4b5707c5aae652f3044d01dfae293258001148c9e5 not found: ID does not exist"
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.516347 4840 scope.go:117] "RemoveContainer" containerID="0f0ad303f808653492f9defd66095ad8a2fe9618f9a992060769df4c85ac31a1"
Dec 05 15:35:49 crc kubenswrapper[4840]: E1205 15:35:49.516799 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f0ad303f808653492f9defd66095ad8a2fe9618f9a992060769df4c85ac31a1\": container with ID starting with 0f0ad303f808653492f9defd66095ad8a2fe9618f9a992060769df4c85ac31a1 not found: ID does not exist" containerID="0f0ad303f808653492f9defd66095ad8a2fe9618f9a992060769df4c85ac31a1"
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.516930 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f0ad303f808653492f9defd66095ad8a2fe9618f9a992060769df4c85ac31a1"} err="failed to get container status \"0f0ad303f808653492f9defd66095ad8a2fe9618f9a992060769df4c85ac31a1\": rpc error: code = NotFound desc = could not find container \"0f0ad303f808653492f9defd66095ad8a2fe9618f9a992060769df4c85ac31a1\": container with ID starting with 0f0ad303f808653492f9defd66095ad8a2fe9618f9a992060769df4c85ac31a1 not found: ID does not exist"
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.517038 4840 scope.go:117] "RemoveContainer" containerID="2ebb68c90409f61177ae047c4c8ac17ecb222ec7f0d57656ed001c7a42e7dde8"
Dec 05 15:35:49 crc kubenswrapper[4840]: E1205 15:35:49.517348 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ebb68c90409f61177ae047c4c8ac17ecb222ec7f0d57656ed001c7a42e7dde8\": container with ID starting with 2ebb68c90409f61177ae047c4c8ac17ecb222ec7f0d57656ed001c7a42e7dde8 not found: ID does not exist" containerID="2ebb68c90409f61177ae047c4c8ac17ecb222ec7f0d57656ed001c7a42e7dde8"
Dec 05 15:35:49 crc kubenswrapper[4840]: I1205 15:35:49.517376 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ebb68c90409f61177ae047c4c8ac17ecb222ec7f0d57656ed001c7a42e7dde8"} err="failed to get container status \"2ebb68c90409f61177ae047c4c8ac17ecb222ec7f0d57656ed001c7a42e7dde8\": rpc error: code = NotFound desc = could not find container \"2ebb68c90409f61177ae047c4c8ac17ecb222ec7f0d57656ed001c7a42e7dde8\": container with ID starting with 2ebb68c90409f61177ae047c4c8ac17ecb222ec7f0d57656ed001c7a42e7dde8 not found: ID does not exist"
Dec 05 15:35:50 crc kubenswrapper[4840]: I1205 15:35:50.080396 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a" path="/var/lib/kubelet/pods/c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a/volumes"
Dec 05 15:36:19 crc kubenswrapper[4840]: I1205 15:36:19.472354 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 15:36:19 crc kubenswrapper[4840]: I1205 15:36:19.472973 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 15:36:19 crc kubenswrapper[4840]: I1205 15:36:19.473016 4840 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs"
Dec 05 15:36:19 crc kubenswrapper[4840]: I1205 15:36:19.473674 4840 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c"} pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 15:36:19 crc kubenswrapper[4840]: I1205 15:36:19.473726 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" containerID="cri-o://9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c" gracePeriod=600
Dec 05 15:36:19 crc kubenswrapper[4840]: E1205 15:36:19.594138 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:36:19 crc kubenswrapper[4840]: I1205 15:36:19.707695 4840 generic.go:334] "Generic (PLEG): container finished" podID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c" exitCode=0
Dec 05 15:36:19 crc kubenswrapper[4840]: I1205 15:36:19.707744 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerDied","Data":"9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c"}
Dec 05 15:36:19 crc kubenswrapper[4840]: I1205 15:36:19.707783 4840 scope.go:117] "RemoveContainer" containerID="4326a1eba95fff009410270bad695965f3fad46dfb07fed1bafd6c12a2802aa3"
Dec 05 15:36:19 crc kubenswrapper[4840]: I1205 15:36:19.709399 4840 scope.go:117] "RemoveContainer" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c"
Dec 05 15:36:19 crc kubenswrapper[4840]: E1205 15:36:19.709713 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:36:35 crc kubenswrapper[4840]: I1205 15:36:35.067377 4840 scope.go:117] "RemoveContainer" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c"
Dec 05 15:36:35 crc kubenswrapper[4840]: E1205 15:36:35.068912 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:36:48 crc kubenswrapper[4840]: I1205 15:36:48.067456 4840 scope.go:117] "RemoveContainer" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c"
Dec 05 15:36:48 crc kubenswrapper[4840]: E1205 15:36:48.068226 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:37:03 crc kubenswrapper[4840]: I1205 15:37:03.067205 4840 scope.go:117] "RemoveContainer" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c"
Dec 05 15:37:03 crc kubenswrapper[4840]: E1205 15:37:03.068085 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:37:15 crc kubenswrapper[4840]: I1205 15:37:15.067393 4840 scope.go:117] "RemoveContainer" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c"
Dec 05 15:37:15 crc kubenswrapper[4840]: E1205 15:37:15.068423 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:37:26 crc kubenswrapper[4840]: I1205 15:37:26.067278 4840 scope.go:117] "RemoveContainer" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c"
Dec 05 15:37:26 crc kubenswrapper[4840]: E1205 15:37:26.068147 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:37:41 crc kubenswrapper[4840]: I1205 15:37:41.068141 4840 scope.go:117] "RemoveContainer" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c"
Dec 05 15:37:41 crc kubenswrapper[4840]: E1205 15:37:41.069107 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:37:55 crc kubenswrapper[4840]: I1205 15:37:55.072846 4840 scope.go:117] "RemoveContainer" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c"
Dec 05 15:37:55 crc kubenswrapper[4840]: E1205 15:37:55.077172 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:38:07 crc kubenswrapper[4840]: I1205 15:38:07.066676 4840 scope.go:117] "RemoveContainer" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c"
Dec 05 15:38:07 crc kubenswrapper[4840]: E1205 15:38:07.067651 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:38:13 crc kubenswrapper[4840]: I1205 15:38:13.374934 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-cjnps"]
Dec 05 15:38:13 crc kubenswrapper[4840]: E1205 15:38:13.375899 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccdb6da2-a3e0-430b-bd92-26fcd4425773" containerName="registry-server"
Dec 05 15:38:13 crc kubenswrapper[4840]: I1205 15:38:13.375916 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccdb6da2-a3e0-430b-bd92-26fcd4425773" containerName="registry-server"
Dec 05 15:38:13 crc kubenswrapper[4840]: E1205 15:38:13.375928 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccdb6da2-a3e0-430b-bd92-26fcd4425773" containerName="extract-utilities"
Dec 05 15:38:13 crc kubenswrapper[4840]: I1205 15:38:13.375935 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccdb6da2-a3e0-430b-bd92-26fcd4425773" containerName="extract-utilities"
Dec 05 15:38:13 crc kubenswrapper[4840]: E1205 15:38:13.376008 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a" containerName="extract-content"
Dec 05 15:38:13 crc kubenswrapper[4840]: I1205 15:38:13.376019 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a" containerName="extract-content"
Dec 05 15:38:13 crc kubenswrapper[4840]: E1205 15:38:13.376047 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a" containerName="registry-server"
Dec 05 15:38:13 crc kubenswrapper[4840]: I1205 15:38:13.376057 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a" containerName="registry-server"
Dec 05 15:38:13 crc kubenswrapper[4840]: E1205 15:38:13.376112 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a" containerName="extract-utilities"
Dec 05 15:38:13 crc kubenswrapper[4840]: I1205 15:38:13.376122 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a" containerName="extract-utilities"
Dec 05 15:38:13 crc kubenswrapper[4840]: E1205 15:38:13.376138 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccdb6da2-a3e0-430b-bd92-26fcd4425773" containerName="extract-content"
Dec 05 15:38:13 crc kubenswrapper[4840]: I1205 15:38:13.376143 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccdb6da2-a3e0-430b-bd92-26fcd4425773" containerName="extract-content"
Dec 05 15:38:13 crc kubenswrapper[4840]: I1205 15:38:13.376363 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccdb6da2-a3e0-430b-bd92-26fcd4425773" containerName="registry-server"
Dec 05 15:38:13 crc kubenswrapper[4840]: I1205 15:38:13.376376 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="c71a6d3c-ec2f-42f8-9b0d-ab5765412e1a" containerName="registry-server"
Dec 05 15:38:13 crc kubenswrapper[4840]: I1205 15:38:13.377778 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cjnps"
Dec 05 15:38:13 crc kubenswrapper[4840]: I1205 15:38:13.390536 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cjnps"]
Dec 05 15:38:13 crc kubenswrapper[4840]: I1205 15:38:13.547189 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/163405b4-3d01-4084-b71d-493a21638dea-utilities\") pod \"redhat-marketplace-cjnps\" (UID: \"163405b4-3d01-4084-b71d-493a21638dea\") " pod="openshift-marketplace/redhat-marketplace-cjnps"
Dec 05 15:38:13 crc kubenswrapper[4840]: I1205 15:38:13.547300 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/163405b4-3d01-4084-b71d-493a21638dea-catalog-content\") pod \"redhat-marketplace-cjnps\" (UID: \"163405b4-3d01-4084-b71d-493a21638dea\") " pod="openshift-marketplace/redhat-marketplace-cjnps"
Dec 05 15:38:13 crc kubenswrapper[4840]: I1205 15:38:13.547823 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtmqn\" (UniqueName: \"kubernetes.io/projected/163405b4-3d01-4084-b71d-493a21638dea-kube-api-access-dtmqn\") pod \"redhat-marketplace-cjnps\" (UID: \"163405b4-3d01-4084-b71d-493a21638dea\") " pod="openshift-marketplace/redhat-marketplace-cjnps"
Dec 05 15:38:13 crc kubenswrapper[4840]: I1205 15:38:13.649277 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/163405b4-3d01-4084-b71d-493a21638dea-utilities\") pod \"redhat-marketplace-cjnps\" (UID: \"163405b4-3d01-4084-b71d-493a21638dea\") " pod="openshift-marketplace/redhat-marketplace-cjnps"
Dec 05 15:38:13 crc kubenswrapper[4840]: I1205 15:38:13.649365 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/163405b4-3d01-4084-b71d-493a21638dea-catalog-content\") pod \"redhat-marketplace-cjnps\" (UID: \"163405b4-3d01-4084-b71d-493a21638dea\") " pod="openshift-marketplace/redhat-marketplace-cjnps"
Dec 05 15:38:13 crc kubenswrapper[4840]: I1205 15:38:13.649452 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtmqn\" (UniqueName: \"kubernetes.io/projected/163405b4-3d01-4084-b71d-493a21638dea-kube-api-access-dtmqn\") pod \"redhat-marketplace-cjnps\" (UID: \"163405b4-3d01-4084-b71d-493a21638dea\") " pod="openshift-marketplace/redhat-marketplace-cjnps"
Dec 05 15:38:13 crc kubenswrapper[4840]: I1205 15:38:13.649996 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/163405b4-3d01-4084-b71d-493a21638dea-utilities\") pod \"redhat-marketplace-cjnps\" (UID: \"163405b4-3d01-4084-b71d-493a21638dea\") " pod="openshift-marketplace/redhat-marketplace-cjnps"
Dec 05 15:38:13 crc kubenswrapper[4840]: I1205 15:38:13.650126 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/163405b4-3d01-4084-b71d-493a21638dea-catalog-content\") pod \"redhat-marketplace-cjnps\" (UID: \"163405b4-3d01-4084-b71d-493a21638dea\") " pod="openshift-marketplace/redhat-marketplace-cjnps"
Dec 05 15:38:13 crc kubenswrapper[4840]: I1205 15:38:13.671604 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtmqn\" (UniqueName: \"kubernetes.io/projected/163405b4-3d01-4084-b71d-493a21638dea-kube-api-access-dtmqn\") pod \"redhat-marketplace-cjnps\" (UID: \"163405b4-3d01-4084-b71d-493a21638dea\") " pod="openshift-marketplace/redhat-marketplace-cjnps"
Dec 05 15:38:13 crc kubenswrapper[4840]: I1205 15:38:13.716124 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cjnps"
Dec 05 15:38:14 crc kubenswrapper[4840]: I1205 15:38:14.054668 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cjnps"]
Dec 05 15:38:14 crc kubenswrapper[4840]: I1205 15:38:14.389787 4840 generic.go:334] "Generic (PLEG): container finished" podID="163405b4-3d01-4084-b71d-493a21638dea" containerID="58f4d2a744882e16b5e44d2634aaaadf46ca048631ea9a145498cf0e1aa3367a" exitCode=0
Dec 05 15:38:14 crc kubenswrapper[4840]: I1205 15:38:14.389883 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cjnps" event={"ID":"163405b4-3d01-4084-b71d-493a21638dea","Type":"ContainerDied","Data":"58f4d2a744882e16b5e44d2634aaaadf46ca048631ea9a145498cf0e1aa3367a"}
Dec 05 15:38:14 crc kubenswrapper[4840]: I1205 15:38:14.390219 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cjnps" event={"ID":"163405b4-3d01-4084-b71d-493a21638dea","Type":"ContainerStarted","Data":"a1f94f5bd265bb93db6d0818e2183005a28b2713f571eb30a0a6265c3001f091"}
Dec 05 15:38:14 crc kubenswrapper[4840]: I1205 15:38:14.392957 4840 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 15:38:16 crc kubenswrapper[4840]: I1205 15:38:16.413256 4840 generic.go:334] "Generic (PLEG): container finished" podID="163405b4-3d01-4084-b71d-493a21638dea" containerID="3fe4821fddf90f1228a02dca34727227c1fe3a6257c1bdc104ca1921d030bf52" exitCode=0
Dec 05 15:38:16 crc kubenswrapper[4840]: I1205 15:38:16.413889 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cjnps" event={"ID":"163405b4-3d01-4084-b71d-493a21638dea","Type":"ContainerDied","Data":"3fe4821fddf90f1228a02dca34727227c1fe3a6257c1bdc104ca1921d030bf52"}
Dec 05 15:38:17 crc kubenswrapper[4840]: I1205 15:38:17.424283 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cjnps" event={"ID":"163405b4-3d01-4084-b71d-493a21638dea","Type":"ContainerStarted","Data":"88fba39cfa4321b739d97eca1b44121aca7501c61ebddbadef6edbc165ab09bf"}
Dec 05 15:38:17 crc kubenswrapper[4840]: I1205 15:38:17.450791 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-cjnps" podStartSLOduration=1.9789793580000001 podStartE2EDuration="4.450760368s" podCreationTimestamp="2025-12-05 15:38:13 +0000 UTC" firstStartedPulling="2025-12-05 15:38:14.39175532 +0000 UTC m=+2372.732817974" lastFinishedPulling="2025-12-05 15:38:16.86353637 +0000 UTC m=+2375.204598984" observedRunningTime="2025-12-05 15:38:17.441726152 +0000 UTC m=+2375.782788776" watchObservedRunningTime="2025-12-05 15:38:17.450760368 +0000 UTC m=+2375.791823002"
Dec 05 15:38:19 crc kubenswrapper[4840]: I1205 15:38:19.066581 4840 scope.go:117] "RemoveContainer" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c"
Dec 05 15:38:19 crc kubenswrapper[4840]: E1205 15:38:19.067129 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:38:23 crc kubenswrapper[4840]: I1205 15:38:23.717649 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-cjnps"
Dec 05 15:38:23 crc kubenswrapper[4840]: I1205 15:38:23.718202 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-cjnps"
Dec 05 15:38:23 crc kubenswrapper[4840]: I1205 15:38:23.769966 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-cjnps"
Dec 05 15:38:24 crc kubenswrapper[4840]: I1205 15:38:24.549155 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-cjnps"
Dec 05 15:38:24 crc kubenswrapper[4840]: I1205 15:38:24.598562 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cjnps"]
Dec 05 15:38:26 crc kubenswrapper[4840]: I1205 15:38:26.508293 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-cjnps" podUID="163405b4-3d01-4084-b71d-493a21638dea" containerName="registry-server" containerID="cri-o://88fba39cfa4321b739d97eca1b44121aca7501c61ebddbadef6edbc165ab09bf" gracePeriod=2
Dec 05 15:38:26 crc kubenswrapper[4840]: I1205 15:38:26.945498 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cjnps"
Dec 05 15:38:27 crc kubenswrapper[4840]: I1205 15:38:27.020018 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/163405b4-3d01-4084-b71d-493a21638dea-utilities\") pod \"163405b4-3d01-4084-b71d-493a21638dea\" (UID: \"163405b4-3d01-4084-b71d-493a21638dea\") "
Dec 05 15:38:27 crc kubenswrapper[4840]: I1205 15:38:27.020838 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/163405b4-3d01-4084-b71d-493a21638dea-catalog-content\") pod \"163405b4-3d01-4084-b71d-493a21638dea\" (UID: \"163405b4-3d01-4084-b71d-493a21638dea\") "
Dec 05 15:38:27 crc kubenswrapper[4840]: I1205 15:38:27.020798 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/163405b4-3d01-4084-b71d-493a21638dea-utilities" (OuterVolumeSpecName: "utilities") pod "163405b4-3d01-4084-b71d-493a21638dea" (UID: "163405b4-3d01-4084-b71d-493a21638dea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 15:38:27 crc kubenswrapper[4840]: I1205 15:38:27.023180 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dtmqn\" (UniqueName: \"kubernetes.io/projected/163405b4-3d01-4084-b71d-493a21638dea-kube-api-access-dtmqn\") pod \"163405b4-3d01-4084-b71d-493a21638dea\" (UID: \"163405b4-3d01-4084-b71d-493a21638dea\") "
Dec 05 15:38:27 crc kubenswrapper[4840]: I1205 15:38:27.024135 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/163405b4-3d01-4084-b71d-493a21638dea-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 15:38:27 crc kubenswrapper[4840]: I1205 15:38:27.030119 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/163405b4-3d01-4084-b71d-493a21638dea-kube-api-access-dtmqn" (OuterVolumeSpecName: "kube-api-access-dtmqn") pod "163405b4-3d01-4084-b71d-493a21638dea" (UID: "163405b4-3d01-4084-b71d-493a21638dea"). InnerVolumeSpecName "kube-api-access-dtmqn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 15:38:27 crc kubenswrapper[4840]: I1205 15:38:27.042182 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/163405b4-3d01-4084-b71d-493a21638dea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "163405b4-3d01-4084-b71d-493a21638dea" (UID: "163405b4-3d01-4084-b71d-493a21638dea"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 15:38:27 crc kubenswrapper[4840]: I1205 15:38:27.126510 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dtmqn\" (UniqueName: \"kubernetes.io/projected/163405b4-3d01-4084-b71d-493a21638dea-kube-api-access-dtmqn\") on node \"crc\" DevicePath \"\""
Dec 05 15:38:27 crc kubenswrapper[4840]: I1205 15:38:27.126536 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/163405b4-3d01-4084-b71d-493a21638dea-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 15:38:27 crc kubenswrapper[4840]: I1205 15:38:27.517749 4840 generic.go:334] "Generic (PLEG): container finished" podID="163405b4-3d01-4084-b71d-493a21638dea" containerID="88fba39cfa4321b739d97eca1b44121aca7501c61ebddbadef6edbc165ab09bf" exitCode=0
Dec 05 15:38:27 crc kubenswrapper[4840]: I1205 15:38:27.517833 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cjnps" event={"ID":"163405b4-3d01-4084-b71d-493a21638dea","Type":"ContainerDied","Data":"88fba39cfa4321b739d97eca1b44121aca7501c61ebddbadef6edbc165ab09bf"}
Dec 05 15:38:27 crc kubenswrapper[4840]: I1205 15:38:27.518146 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cjnps" event={"ID":"163405b4-3d01-4084-b71d-493a21638dea","Type":"ContainerDied","Data":"a1f94f5bd265bb93db6d0818e2183005a28b2713f571eb30a0a6265c3001f091"}
Dec 05 15:38:27 crc kubenswrapper[4840]: I1205 15:38:27.518169 4840 scope.go:117] "RemoveContainer" containerID="88fba39cfa4321b739d97eca1b44121aca7501c61ebddbadef6edbc165ab09bf"
Dec 05 15:38:27 crc kubenswrapper[4840]: I1205 15:38:27.517895 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cjnps"
Dec 05 15:38:27 crc kubenswrapper[4840]: I1205 15:38:27.546583 4840 scope.go:117] "RemoveContainer" containerID="3fe4821fddf90f1228a02dca34727227c1fe3a6257c1bdc104ca1921d030bf52"
Dec 05 15:38:27 crc kubenswrapper[4840]: I1205 15:38:27.553937 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cjnps"]
Dec 05 15:38:27 crc kubenswrapper[4840]: I1205 15:38:27.563179 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-cjnps"]
Dec 05 15:38:27 crc kubenswrapper[4840]: I1205 15:38:27.573556 4840 scope.go:117] "RemoveContainer" containerID="58f4d2a744882e16b5e44d2634aaaadf46ca048631ea9a145498cf0e1aa3367a"
Dec 05 15:38:27 crc kubenswrapper[4840]: I1205 15:38:27.612199 4840 scope.go:117] "RemoveContainer" containerID="88fba39cfa4321b739d97eca1b44121aca7501c61ebddbadef6edbc165ab09bf"
Dec 05 15:38:27 crc kubenswrapper[4840]: E1205 15:38:27.612721 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88fba39cfa4321b739d97eca1b44121aca7501c61ebddbadef6edbc165ab09bf\": container with ID starting with 88fba39cfa4321b739d97eca1b44121aca7501c61ebddbadef6edbc165ab09bf not found: ID does not exist" containerID="88fba39cfa4321b739d97eca1b44121aca7501c61ebddbadef6edbc165ab09bf"
Dec 05 15:38:27 crc kubenswrapper[4840]: I1205 15:38:27.612752 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88fba39cfa4321b739d97eca1b44121aca7501c61ebddbadef6edbc165ab09bf"} err="failed to get container status \"88fba39cfa4321b739d97eca1b44121aca7501c61ebddbadef6edbc165ab09bf\": rpc error: code = NotFound desc = could not find container \"88fba39cfa4321b739d97eca1b44121aca7501c61ebddbadef6edbc165ab09bf\": container with ID starting with 88fba39cfa4321b739d97eca1b44121aca7501c61ebddbadef6edbc165ab09bf not found: ID does not exist"
Dec 05 15:38:27 crc kubenswrapper[4840]: I1205 15:38:27.612771 4840 scope.go:117] "RemoveContainer" containerID="3fe4821fddf90f1228a02dca34727227c1fe3a6257c1bdc104ca1921d030bf52"
Dec 05 15:38:27 crc kubenswrapper[4840]: E1205 15:38:27.613369 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fe4821fddf90f1228a02dca34727227c1fe3a6257c1bdc104ca1921d030bf52\": container with ID starting with 3fe4821fddf90f1228a02dca34727227c1fe3a6257c1bdc104ca1921d030bf52 not found: ID does not exist" containerID="3fe4821fddf90f1228a02dca34727227c1fe3a6257c1bdc104ca1921d030bf52"
Dec 05 15:38:27 crc kubenswrapper[4840]: I1205 15:38:27.613417 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fe4821fddf90f1228a02dca34727227c1fe3a6257c1bdc104ca1921d030bf52"} err="failed to get container status \"3fe4821fddf90f1228a02dca34727227c1fe3a6257c1bdc104ca1921d030bf52\": rpc error: code = NotFound desc = could not find container \"3fe4821fddf90f1228a02dca34727227c1fe3a6257c1bdc104ca1921d030bf52\": container with ID starting with 3fe4821fddf90f1228a02dca34727227c1fe3a6257c1bdc104ca1921d030bf52 not found: ID does not exist"
Dec 05 15:38:27 crc kubenswrapper[4840]: I1205 15:38:27.613447 4840 scope.go:117] "RemoveContainer" containerID="58f4d2a744882e16b5e44d2634aaaadf46ca048631ea9a145498cf0e1aa3367a"
Dec 05 15:38:27 crc kubenswrapper[4840]: E1205 15:38:27.613744 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58f4d2a744882e16b5e44d2634aaaadf46ca048631ea9a145498cf0e1aa3367a\": container with ID starting with 58f4d2a744882e16b5e44d2634aaaadf46ca048631ea9a145498cf0e1aa3367a not found: ID does not exist" containerID="58f4d2a744882e16b5e44d2634aaaadf46ca048631ea9a145498cf0e1aa3367a"
Dec 05 15:38:27 crc kubenswrapper[4840]: I1205 15:38:27.613769 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58f4d2a744882e16b5e44d2634aaaadf46ca048631ea9a145498cf0e1aa3367a"} err="failed to get container status \"58f4d2a744882e16b5e44d2634aaaadf46ca048631ea9a145498cf0e1aa3367a\": rpc error: code = NotFound desc = could not find container \"58f4d2a744882e16b5e44d2634aaaadf46ca048631ea9a145498cf0e1aa3367a\": container with ID starting with 58f4d2a744882e16b5e44d2634aaaadf46ca048631ea9a145498cf0e1aa3367a not found: ID does not exist"
Dec 05 15:38:28 crc kubenswrapper[4840]: I1205 15:38:28.076666 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="163405b4-3d01-4084-b71d-493a21638dea" path="/var/lib/kubelet/pods/163405b4-3d01-4084-b71d-493a21638dea/volumes"
Dec 05 15:38:30 crc kubenswrapper[4840]: I1205 15:38:30.066886 4840 scope.go:117] "RemoveContainer" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c"
Dec 05 15:38:30 crc kubenswrapper[4840]: E1205 15:38:30.067489 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:38:44 crc kubenswrapper[4840]: I1205 15:38:44.067773 4840 scope.go:117] "RemoveContainer" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c"
Dec 05 15:38:44 crc kubenswrapper[4840]: E1205 15:38:44.069150 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:38:56 crc kubenswrapper[4840]: I1205 15:38:56.067260 4840 scope.go:117] "RemoveContainer" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c"
Dec 05 15:38:56 crc kubenswrapper[4840]: E1205 15:38:56.068181 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:39:07 crc kubenswrapper[4840]: I1205 15:39:07.066686 4840 scope.go:117] "RemoveContainer" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c"
Dec 05 15:39:07 crc kubenswrapper[4840]: E1205 15:39:07.067659 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:39:21 crc kubenswrapper[4840]: I1205 15:39:21.067670 4840 scope.go:117] "RemoveContainer" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c"
Dec 05 15:39:21 crc kubenswrapper[4840]: E1205 15:39:21.068798 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:39:27 crc kubenswrapper[4840]: I1205 15:39:27.098178 4840 generic.go:334] "Generic (PLEG): container finished" podID="453e239f-2acb-42cb-a617-35975fb5437a" containerID="210e1141c82447893d9178675b116bf3749dd8d1e9aa40cb3c6a587138d495ac" exitCode=0
Dec 05 15:39:27 crc kubenswrapper[4840]: I1205 15:39:27.098306 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk" event={"ID":"453e239f-2acb-42cb-a617-35975fb5437a","Type":"ContainerDied","Data":"210e1141c82447893d9178675b116bf3749dd8d1e9aa40cb3c6a587138d495ac"}
Dec 05 15:39:28 crc kubenswrapper[4840]: I1205 15:39:28.602806 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk"
Dec 05 15:39:28 crc kubenswrapper[4840]: I1205 15:39:28.769685 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/453e239f-2acb-42cb-a617-35975fb5437a-libvirt-secret-0\") pod \"453e239f-2acb-42cb-a617-35975fb5437a\" (UID: \"453e239f-2acb-42cb-a617-35975fb5437a\") "
Dec 05 15:39:28 crc kubenswrapper[4840]: I1205 15:39:28.769988 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kp4x7\" (UniqueName: \"kubernetes.io/projected/453e239f-2acb-42cb-a617-35975fb5437a-kube-api-access-kp4x7\") pod \"453e239f-2acb-42cb-a617-35975fb5437a\" (UID: \"453e239f-2acb-42cb-a617-35975fb5437a\") "
Dec 05 15:39:28 crc kubenswrapper[4840]: I1205 15:39:28.770129 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/453e239f-2acb-42cb-a617-35975fb5437a-inventory\") pod \"453e239f-2acb-42cb-a617-35975fb5437a\" (UID: \"453e239f-2acb-42cb-a617-35975fb5437a\") "
Dec 05 15:39:28 crc kubenswrapper[4840]: I1205 15:39:28.770264 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/453e239f-2acb-42cb-a617-35975fb5437a-libvirt-combined-ca-bundle\") pod \"453e239f-2acb-42cb-a617-35975fb5437a\" (UID: \"453e239f-2acb-42cb-a617-35975fb5437a\") "
\"453e239f-2acb-42cb-a617-35975fb5437a\") " Dec 05 15:39:28 crc kubenswrapper[4840]: I1205 15:39:28.776944 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/453e239f-2acb-42cb-a617-35975fb5437a-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "453e239f-2acb-42cb-a617-35975fb5437a" (UID: "453e239f-2acb-42cb-a617-35975fb5437a"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:39:28 crc kubenswrapper[4840]: I1205 15:39:28.783096 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/453e239f-2acb-42cb-a617-35975fb5437a-kube-api-access-kp4x7" (OuterVolumeSpecName: "kube-api-access-kp4x7") pod "453e239f-2acb-42cb-a617-35975fb5437a" (UID: "453e239f-2acb-42cb-a617-35975fb5437a"). InnerVolumeSpecName "kube-api-access-kp4x7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:39:28 crc kubenswrapper[4840]: I1205 15:39:28.802648 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/453e239f-2acb-42cb-a617-35975fb5437a-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "453e239f-2acb-42cb-a617-35975fb5437a" (UID: "453e239f-2acb-42cb-a617-35975fb5437a"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:39:28 crc kubenswrapper[4840]: I1205 15:39:28.812060 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/453e239f-2acb-42cb-a617-35975fb5437a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "453e239f-2acb-42cb-a617-35975fb5437a" (UID: "453e239f-2acb-42cb-a617-35975fb5437a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:39:28 crc kubenswrapper[4840]: I1205 15:39:28.818357 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/453e239f-2acb-42cb-a617-35975fb5437a-inventory" (OuterVolumeSpecName: "inventory") pod "453e239f-2acb-42cb-a617-35975fb5437a" (UID: "453e239f-2acb-42cb-a617-35975fb5437a"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:39:28 crc kubenswrapper[4840]: I1205 15:39:28.873352 4840 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/453e239f-2acb-42cb-a617-35975fb5437a-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 05 15:39:28 crc kubenswrapper[4840]: I1205 15:39:28.873385 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kp4x7\" (UniqueName: \"kubernetes.io/projected/453e239f-2acb-42cb-a617-35975fb5437a-kube-api-access-kp4x7\") on node \"crc\" DevicePath \"\"" Dec 05 15:39:28 crc kubenswrapper[4840]: I1205 15:39:28.873402 4840 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/453e239f-2acb-42cb-a617-35975fb5437a-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 15:39:28 crc kubenswrapper[4840]: I1205 15:39:28.873418 4840 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/453e239f-2acb-42cb-a617-35975fb5437a-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:39:28 crc kubenswrapper[4840]: I1205 15:39:28.873430 4840 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/453e239f-2acb-42cb-a617-35975fb5437a-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.120267 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk" event={"ID":"453e239f-2acb-42cb-a617-35975fb5437a","Type":"ContainerDied","Data":"0b5d08d92cb2fb542c8fb9dd523fd33838cb58e26d09d4108b73b317e7670aab"} Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.120328 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b5d08d92cb2fb542c8fb9dd523fd33838cb58e26d09d4108b73b317e7670aab" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.120394 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.224127 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4"] Dec 05 15:39:29 crc kubenswrapper[4840]: E1205 15:39:29.224489 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="163405b4-3d01-4084-b71d-493a21638dea" containerName="extract-content" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.224504 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="163405b4-3d01-4084-b71d-493a21638dea" containerName="extract-content" Dec 05 15:39:29 crc kubenswrapper[4840]: E1205 15:39:29.224540 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="453e239f-2acb-42cb-a617-35975fb5437a" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.224548 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="453e239f-2acb-42cb-a617-35975fb5437a" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 15:39:29 crc kubenswrapper[4840]: E1205 15:39:29.224565 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="163405b4-3d01-4084-b71d-493a21638dea" containerName="extract-utilities" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.224571 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="163405b4-3d01-4084-b71d-493a21638dea" containerName="extract-utilities" Dec 05 15:39:29 crc kubenswrapper[4840]: E1205 15:39:29.224582 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="163405b4-3d01-4084-b71d-493a21638dea" containerName="registry-server" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.224588 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="163405b4-3d01-4084-b71d-493a21638dea" containerName="registry-server" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.224765 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="163405b4-3d01-4084-b71d-493a21638dea" containerName="registry-server" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.224776 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="453e239f-2acb-42cb-a617-35975fb5437a" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.225381 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.228459 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.228503 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.228736 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.229330 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.229352 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.236813 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-6c9x2" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.237182 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.247399 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4"] Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.382973 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.383056 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.383086 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.383164 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.383245 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ddh4\" (UniqueName: 
\"kubernetes.io/projected/c64b6a7d-6e39-40f8-8837-660f386a357e-kube-api-access-6ddh4\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.383328 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.383367 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.383440 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.383465 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.485971 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ddh4\" (UniqueName: \"kubernetes.io/projected/c64b6a7d-6e39-40f8-8837-660f386a357e-kube-api-access-6ddh4\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.486066 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.486125 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.486194 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" 
(UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.486223 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.486273 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.486294 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.486323 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.486818 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.490273 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.491143 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.492126 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: 
\"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.492621 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.493547 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.497913 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.499249 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.506722 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.511232 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ddh4\" (UniqueName: \"kubernetes.io/projected/c64b6a7d-6e39-40f8-8837-660f386a357e-kube-api-access-6ddh4\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4c2f4\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:29 crc kubenswrapper[4840]: I1205 15:39:29.553421 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:39:30 crc kubenswrapper[4840]: I1205 15:39:30.109108 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4"] Dec 05 15:39:30 crc kubenswrapper[4840]: I1205 15:39:30.130608 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" event={"ID":"c64b6a7d-6e39-40f8-8837-660f386a357e","Type":"ContainerStarted","Data":"e7feeb0dfc3b489a3f4067b691eff1e749b4e42e0b3e08afa1996d0e0a82fce6"} Dec 05 15:39:31 crc kubenswrapper[4840]: I1205 15:39:31.139182 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" event={"ID":"c64b6a7d-6e39-40f8-8837-660f386a357e","Type":"ContainerStarted","Data":"f255968748d2e3c0438b75e4e4888af68b06034f167d91d72c0f3c476a19c959"} Dec 05 15:39:31 crc kubenswrapper[4840]: I1205 15:39:31.161994 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" podStartSLOduration=1.674203769 podStartE2EDuration="2.161973242s" podCreationTimestamp="2025-12-05 15:39:29 +0000 UTC" firstStartedPulling="2025-12-05 15:39:30.116375808 +0000 UTC m=+2448.457438422" lastFinishedPulling="2025-12-05 15:39:30.604145261 +0000 UTC m=+2448.945207895" observedRunningTime="2025-12-05 15:39:31.157772192 +0000 UTC m=+2449.498834816" watchObservedRunningTime="2025-12-05 15:39:31.161973242 +0000 UTC m=+2449.503035856" Dec 05 15:39:35 crc kubenswrapper[4840]: I1205 15:39:35.066744 4840 scope.go:117] "RemoveContainer" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c" Dec 05 15:39:35 crc kubenswrapper[4840]: E1205 15:39:35.067681 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:39:49 crc kubenswrapper[4840]: I1205 15:39:49.067077 4840 scope.go:117] "RemoveContainer" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c" Dec 05 15:39:49 crc kubenswrapper[4840]: E1205 15:39:49.067934 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:40:04 crc kubenswrapper[4840]: I1205 15:40:04.072055 4840 scope.go:117] "RemoveContainer" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c" Dec 05 15:40:04 crc kubenswrapper[4840]: E1205 15:40:04.072981 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" 
podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:40:19 crc kubenswrapper[4840]: I1205 15:40:19.067157 4840 scope.go:117] "RemoveContainer" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c" Dec 05 15:40:19 crc kubenswrapper[4840]: E1205 15:40:19.069200 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:40:34 crc kubenswrapper[4840]: I1205 15:40:34.067351 4840 scope.go:117] "RemoveContainer" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c" Dec 05 15:40:34 crc kubenswrapper[4840]: E1205 15:40:34.068206 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:40:47 crc kubenswrapper[4840]: I1205 15:40:47.066715 4840 scope.go:117] "RemoveContainer" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c" Dec 05 15:40:47 crc kubenswrapper[4840]: E1205 15:40:47.067731 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:40:59 crc kubenswrapper[4840]: I1205 15:40:59.066749 4840 scope.go:117] "RemoveContainer" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c" Dec 05 15:40:59 crc kubenswrapper[4840]: E1205 15:40:59.067469 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:41:10 crc kubenswrapper[4840]: I1205 15:41:10.067391 4840 scope.go:117] "RemoveContainer" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c" Dec 05 15:41:10 crc kubenswrapper[4840]: E1205 15:41:10.068169 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:41:21 crc kubenswrapper[4840]: I1205 15:41:21.066626 4840 scope.go:117] "RemoveContainer" 
containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c" Dec 05 15:41:21 crc kubenswrapper[4840]: I1205 15:41:21.601096 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerStarted","Data":"5e57f28200b123d8625a5f66153ca84da704d74d1056a91e0747e528997298e9"} Dec 05 15:42:20 crc kubenswrapper[4840]: I1205 15:42:20.150563 4840 generic.go:334] "Generic (PLEG): container finished" podID="c64b6a7d-6e39-40f8-8837-660f386a357e" containerID="f255968748d2e3c0438b75e4e4888af68b06034f167d91d72c0f3c476a19c959" exitCode=0 Dec 05 15:42:20 crc kubenswrapper[4840]: I1205 15:42:20.150655 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" event={"ID":"c64b6a7d-6e39-40f8-8837-660f386a357e","Type":"ContainerDied","Data":"f255968748d2e3c0438b75e4e4888af68b06034f167d91d72c0f3c476a19c959"} Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.586379 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.744233 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-combined-ca-bundle\") pod \"c64b6a7d-6e39-40f8-8837-660f386a357e\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.744290 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-cell1-compute-config-1\") pod \"c64b6a7d-6e39-40f8-8837-660f386a357e\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.744396 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-migration-ssh-key-0\") pod \"c64b6a7d-6e39-40f8-8837-660f386a357e\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.744457 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ddh4\" (UniqueName: \"kubernetes.io/projected/c64b6a7d-6e39-40f8-8837-660f386a357e-kube-api-access-6ddh4\") pod \"c64b6a7d-6e39-40f8-8837-660f386a357e\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.744569 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-extra-config-0\") pod \"c64b6a7d-6e39-40f8-8837-660f386a357e\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.744652 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-migration-ssh-key-1\") pod \"c64b6a7d-6e39-40f8-8837-660f386a357e\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.744675 4840 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-inventory\") pod \"c64b6a7d-6e39-40f8-8837-660f386a357e\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.744731 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-cell1-compute-config-0\") pod \"c64b6a7d-6e39-40f8-8837-660f386a357e\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.744778 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-ssh-key\") pod \"c64b6a7d-6e39-40f8-8837-660f386a357e\" (UID: \"c64b6a7d-6e39-40f8-8837-660f386a357e\") " Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.763457 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c64b6a7d-6e39-40f8-8837-660f386a357e-kube-api-access-6ddh4" (OuterVolumeSpecName: "kube-api-access-6ddh4") pod "c64b6a7d-6e39-40f8-8837-660f386a357e" (UID: "c64b6a7d-6e39-40f8-8837-660f386a357e"). InnerVolumeSpecName "kube-api-access-6ddh4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.766429 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "c64b6a7d-6e39-40f8-8837-660f386a357e" (UID: "c64b6a7d-6e39-40f8-8837-660f386a357e"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.794580 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "c64b6a7d-6e39-40f8-8837-660f386a357e" (UID: "c64b6a7d-6e39-40f8-8837-660f386a357e"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.802751 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "c64b6a7d-6e39-40f8-8837-660f386a357e" (UID: "c64b6a7d-6e39-40f8-8837-660f386a357e"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.803015 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "c64b6a7d-6e39-40f8-8837-660f386a357e" (UID: "c64b6a7d-6e39-40f8-8837-660f386a357e"). InnerVolumeSpecName "nova-extra-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.819695 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "c64b6a7d-6e39-40f8-8837-660f386a357e" (UID: "c64b6a7d-6e39-40f8-8837-660f386a357e"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.829969 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "c64b6a7d-6e39-40f8-8837-660f386a357e" (UID: "c64b6a7d-6e39-40f8-8837-660f386a357e"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.830258 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c64b6a7d-6e39-40f8-8837-660f386a357e" (UID: "c64b6a7d-6e39-40f8-8837-660f386a357e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.835478 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-inventory" (OuterVolumeSpecName: "inventory") pod "c64b6a7d-6e39-40f8-8837-660f386a357e" (UID: "c64b6a7d-6e39-40f8-8837-660f386a357e"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.847992 4840 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.848032 4840 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.848051 4840 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.848064 4840 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.848075 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ddh4\" (UniqueName: \"kubernetes.io/projected/c64b6a7d-6e39-40f8-8837-660f386a357e-kube-api-access-6ddh4\") on node \"crc\" DevicePath \"\"" Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.848120 4840 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.848133 4840 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.848144 4840 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 15:42:21 crc kubenswrapper[4840]: I1205 15:42:21.848155 4840 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/c64b6a7d-6e39-40f8-8837-660f386a357e-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.170256 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" event={"ID":"c64b6a7d-6e39-40f8-8837-660f386a357e","Type":"ContainerDied","Data":"e7feeb0dfc3b489a3f4067b691eff1e749b4e42e0b3e08afa1996d0e0a82fce6"} Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.170306 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e7feeb0dfc3b489a3f4067b691eff1e749b4e42e0b3e08afa1996d0e0a82fce6" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.170398 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4c2f4" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.305098 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd"] Dec 05 15:42:22 crc kubenswrapper[4840]: E1205 15:42:22.305820 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c64b6a7d-6e39-40f8-8837-660f386a357e" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.305852 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="c64b6a7d-6e39-40f8-8837-660f386a357e" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.306148 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="c64b6a7d-6e39-40f8-8837-660f386a357e" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.307357 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.311661 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.311778 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.312061 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-6c9x2" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.314832 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.314840 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.318670 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd"] Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.360931 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-76pjd\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.361000 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-76pjd\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.361029 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-76pjd\" (UID: 
\"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.361105 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-76pjd\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.361136 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5m46\" (UniqueName: \"kubernetes.io/projected/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-kube-api-access-j5m46\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-76pjd\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.361169 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-76pjd\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.361219 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-76pjd\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.462551 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-76pjd\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.462661 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-76pjd\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.462700 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-76pjd\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.462725 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: 
\"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-76pjd\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.462794 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-76pjd\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.462856 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5m46\" (UniqueName: \"kubernetes.io/projected/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-kube-api-access-j5m46\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-76pjd\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.462921 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-76pjd\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.466457 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-76pjd\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.466712 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-76pjd\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.467261 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-76pjd\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.467361 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-76pjd\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.467490 4840 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-76pjd\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.467653 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-76pjd\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.480264 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5m46\" (UniqueName: \"kubernetes.io/projected/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-kube-api-access-j5m46\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-76pjd\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:42:22 crc kubenswrapper[4840]: I1205 15:42:22.654720 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:42:23 crc kubenswrapper[4840]: I1205 15:42:23.278465 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd"] Dec 05 15:42:24 crc kubenswrapper[4840]: I1205 15:42:24.204093 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" event={"ID":"d51bc3d7-3ce5-4967-ba22-71cef47d25d1","Type":"ContainerStarted","Data":"45c8e77f694cedd55202acf9c3889557beec79c5be2c09a561e1a5036bf68fa0"} Dec 05 15:42:25 crc kubenswrapper[4840]: I1205 15:42:25.232065 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" event={"ID":"d51bc3d7-3ce5-4967-ba22-71cef47d25d1","Type":"ContainerStarted","Data":"f8c5296ca36215ace6c2b01aca1ba9b15dd417db57b4c820e54e91ce887b7ead"} Dec 05 15:42:25 crc kubenswrapper[4840]: I1205 15:42:25.258335 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" podStartSLOduration=2.214250752 podStartE2EDuration="3.258317481s" podCreationTimestamp="2025-12-05 15:42:22 +0000 UTC" firstStartedPulling="2025-12-05 15:42:23.283995777 +0000 UTC m=+2621.625058391" lastFinishedPulling="2025-12-05 15:42:24.328062506 +0000 UTC m=+2622.669125120" observedRunningTime="2025-12-05 15:42:25.253407281 +0000 UTC m=+2623.594469895" watchObservedRunningTime="2025-12-05 15:42:25.258317481 +0000 UTC m=+2623.599380095" Dec 05 15:43:49 crc kubenswrapper[4840]: I1205 15:43:49.591193 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:43:49 crc kubenswrapper[4840]: I1205 15:43:49.591814 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:44:13 crc kubenswrapper[4840]: I1205 15:44:13.627997 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tsjwf"] Dec 05 15:44:13 crc kubenswrapper[4840]: I1205 15:44:13.635360 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tsjwf" Dec 05 15:44:13 crc kubenswrapper[4840]: I1205 15:44:13.649366 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tsjwf"] Dec 05 15:44:13 crc kubenswrapper[4840]: I1205 15:44:13.738027 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77ad16ff-7dea-4194-b756-8f328c53bc6d-utilities\") pod \"redhat-operators-tsjwf\" (UID: \"77ad16ff-7dea-4194-b756-8f328c53bc6d\") " pod="openshift-marketplace/redhat-operators-tsjwf" Dec 05 15:44:13 crc kubenswrapper[4840]: I1205 15:44:13.738071 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74j9c\" (UniqueName: \"kubernetes.io/projected/77ad16ff-7dea-4194-b756-8f328c53bc6d-kube-api-access-74j9c\") pod \"redhat-operators-tsjwf\" (UID: \"77ad16ff-7dea-4194-b756-8f328c53bc6d\") " pod="openshift-marketplace/redhat-operators-tsjwf" Dec 05 15:44:13 crc kubenswrapper[4840]: I1205 15:44:13.738112 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77ad16ff-7dea-4194-b756-8f328c53bc6d-catalog-content\") pod \"redhat-operators-tsjwf\" (UID: \"77ad16ff-7dea-4194-b756-8f328c53bc6d\") " pod="openshift-marketplace/redhat-operators-tsjwf" Dec 05 15:44:13 crc kubenswrapper[4840]: I1205 15:44:13.839939 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77ad16ff-7dea-4194-b756-8f328c53bc6d-utilities\") pod \"redhat-operators-tsjwf\" (UID: \"77ad16ff-7dea-4194-b756-8f328c53bc6d\") " pod="openshift-marketplace/redhat-operators-tsjwf" Dec 05 15:44:13 crc kubenswrapper[4840]: I1205 15:44:13.839987 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74j9c\" (UniqueName: \"kubernetes.io/projected/77ad16ff-7dea-4194-b756-8f328c53bc6d-kube-api-access-74j9c\") pod \"redhat-operators-tsjwf\" (UID: \"77ad16ff-7dea-4194-b756-8f328c53bc6d\") " pod="openshift-marketplace/redhat-operators-tsjwf" Dec 05 15:44:13 crc kubenswrapper[4840]: I1205 15:44:13.840022 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77ad16ff-7dea-4194-b756-8f328c53bc6d-catalog-content\") pod \"redhat-operators-tsjwf\" (UID: \"77ad16ff-7dea-4194-b756-8f328c53bc6d\") " pod="openshift-marketplace/redhat-operators-tsjwf" Dec 05 15:44:13 crc kubenswrapper[4840]: I1205 15:44:13.840576 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77ad16ff-7dea-4194-b756-8f328c53bc6d-catalog-content\") pod \"redhat-operators-tsjwf\" (UID: \"77ad16ff-7dea-4194-b756-8f328c53bc6d\") " pod="openshift-marketplace/redhat-operators-tsjwf" Dec 05 15:44:13 crc kubenswrapper[4840]: I1205 15:44:13.840821 4840 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77ad16ff-7dea-4194-b756-8f328c53bc6d-utilities\") pod \"redhat-operators-tsjwf\" (UID: \"77ad16ff-7dea-4194-b756-8f328c53bc6d\") " pod="openshift-marketplace/redhat-operators-tsjwf" Dec 05 15:44:13 crc kubenswrapper[4840]: I1205 15:44:13.862736 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74j9c\" (UniqueName: \"kubernetes.io/projected/77ad16ff-7dea-4194-b756-8f328c53bc6d-kube-api-access-74j9c\") pod \"redhat-operators-tsjwf\" (UID: \"77ad16ff-7dea-4194-b756-8f328c53bc6d\") " pod="openshift-marketplace/redhat-operators-tsjwf" Dec 05 15:44:13 crc kubenswrapper[4840]: I1205 15:44:13.964564 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tsjwf" Dec 05 15:44:14 crc kubenswrapper[4840]: I1205 15:44:14.442087 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tsjwf"] Dec 05 15:44:14 crc kubenswrapper[4840]: I1205 15:44:14.605187 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tsjwf" event={"ID":"77ad16ff-7dea-4194-b756-8f328c53bc6d","Type":"ContainerStarted","Data":"0aca2ceb10c33d3860954d75eb83ea367d9c5234ff471ba305dac93e09f04566"} Dec 05 15:44:15 crc kubenswrapper[4840]: I1205 15:44:15.615770 4840 generic.go:334] "Generic (PLEG): container finished" podID="77ad16ff-7dea-4194-b756-8f328c53bc6d" containerID="19fec58c7de56c830b1e3b9bf140cd66e2b51b6d84312dab287aa2824190bc4a" exitCode=0 Dec 05 15:44:15 crc kubenswrapper[4840]: I1205 15:44:15.615831 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tsjwf" event={"ID":"77ad16ff-7dea-4194-b756-8f328c53bc6d","Type":"ContainerDied","Data":"19fec58c7de56c830b1e3b9bf140cd66e2b51b6d84312dab287aa2824190bc4a"} Dec 05 15:44:15 crc kubenswrapper[4840]: I1205 15:44:15.618164 4840 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 15:44:16 crc kubenswrapper[4840]: I1205 15:44:16.626758 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tsjwf" event={"ID":"77ad16ff-7dea-4194-b756-8f328c53bc6d","Type":"ContainerStarted","Data":"a278f458df290ef3c1435e3fb4d8c8d8a8343236f72c575c60bef64805c48eb6"} Dec 05 15:44:18 crc kubenswrapper[4840]: I1205 15:44:18.651323 4840 generic.go:334] "Generic (PLEG): container finished" podID="77ad16ff-7dea-4194-b756-8f328c53bc6d" containerID="a278f458df290ef3c1435e3fb4d8c8d8a8343236f72c575c60bef64805c48eb6" exitCode=0 Dec 05 15:44:18 crc kubenswrapper[4840]: I1205 15:44:18.651423 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tsjwf" event={"ID":"77ad16ff-7dea-4194-b756-8f328c53bc6d","Type":"ContainerDied","Data":"a278f458df290ef3c1435e3fb4d8c8d8a8343236f72c575c60bef64805c48eb6"} Dec 05 15:44:19 crc kubenswrapper[4840]: I1205 15:44:19.472008 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:44:19 crc kubenswrapper[4840]: I1205 15:44:19.472080 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" 
podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:44:20 crc kubenswrapper[4840]: I1205 15:44:20.676798 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tsjwf" event={"ID":"77ad16ff-7dea-4194-b756-8f328c53bc6d","Type":"ContainerStarted","Data":"cb1ca6bfe4263b31e01fc5baf73d966a9a1107942c5689e0e7030cc635e19725"} Dec 05 15:44:20 crc kubenswrapper[4840]: I1205 15:44:20.704453 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tsjwf" podStartSLOduration=3.124433106 podStartE2EDuration="7.704435262s" podCreationTimestamp="2025-12-05 15:44:13 +0000 UTC" firstStartedPulling="2025-12-05 15:44:15.617801725 +0000 UTC m=+2733.958864339" lastFinishedPulling="2025-12-05 15:44:20.197803871 +0000 UTC m=+2738.538866495" observedRunningTime="2025-12-05 15:44:20.694099317 +0000 UTC m=+2739.035161931" watchObservedRunningTime="2025-12-05 15:44:20.704435262 +0000 UTC m=+2739.045497876" Dec 05 15:44:23 crc kubenswrapper[4840]: I1205 15:44:23.965564 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tsjwf" Dec 05 15:44:23 crc kubenswrapper[4840]: I1205 15:44:23.965994 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tsjwf" Dec 05 15:44:25 crc kubenswrapper[4840]: I1205 15:44:25.015367 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-tsjwf" podUID="77ad16ff-7dea-4194-b756-8f328c53bc6d" containerName="registry-server" probeResult="failure" output=< Dec 05 15:44:25 crc kubenswrapper[4840]: timeout: failed to connect service ":50051" within 1s Dec 05 15:44:25 crc kubenswrapper[4840]: > Dec 05 15:44:34 crc kubenswrapper[4840]: I1205 15:44:34.023978 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tsjwf" Dec 05 15:44:34 crc kubenswrapper[4840]: I1205 15:44:34.088888 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tsjwf" Dec 05 15:44:34 crc kubenswrapper[4840]: I1205 15:44:34.273084 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tsjwf"] Dec 05 15:44:35 crc kubenswrapper[4840]: I1205 15:44:35.811251 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tsjwf" podUID="77ad16ff-7dea-4194-b756-8f328c53bc6d" containerName="registry-server" containerID="cri-o://cb1ca6bfe4263b31e01fc5baf73d966a9a1107942c5689e0e7030cc635e19725" gracePeriod=2 Dec 05 15:44:36 crc kubenswrapper[4840]: I1205 15:44:36.257248 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tsjwf" Dec 05 15:44:36 crc kubenswrapper[4840]: I1205 15:44:36.374511 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74j9c\" (UniqueName: \"kubernetes.io/projected/77ad16ff-7dea-4194-b756-8f328c53bc6d-kube-api-access-74j9c\") pod \"77ad16ff-7dea-4194-b756-8f328c53bc6d\" (UID: \"77ad16ff-7dea-4194-b756-8f328c53bc6d\") " Dec 05 15:44:36 crc kubenswrapper[4840]: I1205 15:44:36.375134 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77ad16ff-7dea-4194-b756-8f328c53bc6d-catalog-content\") pod \"77ad16ff-7dea-4194-b756-8f328c53bc6d\" (UID: \"77ad16ff-7dea-4194-b756-8f328c53bc6d\") " Dec 05 15:44:36 crc kubenswrapper[4840]: I1205 15:44:36.375226 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77ad16ff-7dea-4194-b756-8f328c53bc6d-utilities\") pod \"77ad16ff-7dea-4194-b756-8f328c53bc6d\" (UID: \"77ad16ff-7dea-4194-b756-8f328c53bc6d\") " Dec 05 15:44:36 crc kubenswrapper[4840]: I1205 15:44:36.377118 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77ad16ff-7dea-4194-b756-8f328c53bc6d-utilities" (OuterVolumeSpecName: "utilities") pod "77ad16ff-7dea-4194-b756-8f328c53bc6d" (UID: "77ad16ff-7dea-4194-b756-8f328c53bc6d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:44:36 crc kubenswrapper[4840]: I1205 15:44:36.381190 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77ad16ff-7dea-4194-b756-8f328c53bc6d-kube-api-access-74j9c" (OuterVolumeSpecName: "kube-api-access-74j9c") pod "77ad16ff-7dea-4194-b756-8f328c53bc6d" (UID: "77ad16ff-7dea-4194-b756-8f328c53bc6d"). InnerVolumeSpecName "kube-api-access-74j9c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:44:36 crc kubenswrapper[4840]: I1205 15:44:36.477077 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77ad16ff-7dea-4194-b756-8f328c53bc6d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 15:44:36 crc kubenswrapper[4840]: I1205 15:44:36.477114 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74j9c\" (UniqueName: \"kubernetes.io/projected/77ad16ff-7dea-4194-b756-8f328c53bc6d-kube-api-access-74j9c\") on node \"crc\" DevicePath \"\"" Dec 05 15:44:36 crc kubenswrapper[4840]: I1205 15:44:36.487722 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77ad16ff-7dea-4194-b756-8f328c53bc6d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "77ad16ff-7dea-4194-b756-8f328c53bc6d" (UID: "77ad16ff-7dea-4194-b756-8f328c53bc6d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:44:36 crc kubenswrapper[4840]: I1205 15:44:36.579201 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77ad16ff-7dea-4194-b756-8f328c53bc6d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 15:44:36 crc kubenswrapper[4840]: I1205 15:44:36.821422 4840 generic.go:334] "Generic (PLEG): container finished" podID="77ad16ff-7dea-4194-b756-8f328c53bc6d" containerID="cb1ca6bfe4263b31e01fc5baf73d966a9a1107942c5689e0e7030cc635e19725" exitCode=0 Dec 05 15:44:36 crc kubenswrapper[4840]: I1205 15:44:36.821513 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tsjwf" Dec 05 15:44:36 crc kubenswrapper[4840]: I1205 15:44:36.821516 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tsjwf" event={"ID":"77ad16ff-7dea-4194-b756-8f328c53bc6d","Type":"ContainerDied","Data":"cb1ca6bfe4263b31e01fc5baf73d966a9a1107942c5689e0e7030cc635e19725"} Dec 05 15:44:36 crc kubenswrapper[4840]: I1205 15:44:36.822729 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tsjwf" event={"ID":"77ad16ff-7dea-4194-b756-8f328c53bc6d","Type":"ContainerDied","Data":"0aca2ceb10c33d3860954d75eb83ea367d9c5234ff471ba305dac93e09f04566"} Dec 05 15:44:36 crc kubenswrapper[4840]: I1205 15:44:36.822760 4840 scope.go:117] "RemoveContainer" containerID="cb1ca6bfe4263b31e01fc5baf73d966a9a1107942c5689e0e7030cc635e19725" Dec 05 15:44:36 crc kubenswrapper[4840]: I1205 15:44:36.856141 4840 scope.go:117] "RemoveContainer" containerID="a278f458df290ef3c1435e3fb4d8c8d8a8343236f72c575c60bef64805c48eb6" Dec 05 15:44:36 crc kubenswrapper[4840]: I1205 15:44:36.866028 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tsjwf"] Dec 05 15:44:36 crc kubenswrapper[4840]: I1205 15:44:36.873402 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tsjwf"] Dec 05 15:44:36 crc kubenswrapper[4840]: I1205 15:44:36.881686 4840 scope.go:117] "RemoveContainer" containerID="19fec58c7de56c830b1e3b9bf140cd66e2b51b6d84312dab287aa2824190bc4a" Dec 05 15:44:36 crc kubenswrapper[4840]: I1205 15:44:36.933461 4840 scope.go:117] "RemoveContainer" containerID="cb1ca6bfe4263b31e01fc5baf73d966a9a1107942c5689e0e7030cc635e19725" Dec 05 15:44:36 crc kubenswrapper[4840]: E1205 15:44:36.933930 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb1ca6bfe4263b31e01fc5baf73d966a9a1107942c5689e0e7030cc635e19725\": container with ID starting with cb1ca6bfe4263b31e01fc5baf73d966a9a1107942c5689e0e7030cc635e19725 not found: ID does not exist" containerID="cb1ca6bfe4263b31e01fc5baf73d966a9a1107942c5689e0e7030cc635e19725" Dec 05 15:44:36 crc kubenswrapper[4840]: I1205 15:44:36.933967 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb1ca6bfe4263b31e01fc5baf73d966a9a1107942c5689e0e7030cc635e19725"} err="failed to get container status \"cb1ca6bfe4263b31e01fc5baf73d966a9a1107942c5689e0e7030cc635e19725\": rpc error: code = NotFound desc = could not find container \"cb1ca6bfe4263b31e01fc5baf73d966a9a1107942c5689e0e7030cc635e19725\": container with ID starting with cb1ca6bfe4263b31e01fc5baf73d966a9a1107942c5689e0e7030cc635e19725 not found: ID does not exist" Dec 05 15:44:36 crc 
kubenswrapper[4840]: I1205 15:44:36.933994 4840 scope.go:117] "RemoveContainer" containerID="a278f458df290ef3c1435e3fb4d8c8d8a8343236f72c575c60bef64805c48eb6" Dec 05 15:44:36 crc kubenswrapper[4840]: E1205 15:44:36.934240 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a278f458df290ef3c1435e3fb4d8c8d8a8343236f72c575c60bef64805c48eb6\": container with ID starting with a278f458df290ef3c1435e3fb4d8c8d8a8343236f72c575c60bef64805c48eb6 not found: ID does not exist" containerID="a278f458df290ef3c1435e3fb4d8c8d8a8343236f72c575c60bef64805c48eb6" Dec 05 15:44:36 crc kubenswrapper[4840]: I1205 15:44:36.934283 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a278f458df290ef3c1435e3fb4d8c8d8a8343236f72c575c60bef64805c48eb6"} err="failed to get container status \"a278f458df290ef3c1435e3fb4d8c8d8a8343236f72c575c60bef64805c48eb6\": rpc error: code = NotFound desc = could not find container \"a278f458df290ef3c1435e3fb4d8c8d8a8343236f72c575c60bef64805c48eb6\": container with ID starting with a278f458df290ef3c1435e3fb4d8c8d8a8343236f72c575c60bef64805c48eb6 not found: ID does not exist" Dec 05 15:44:36 crc kubenswrapper[4840]: I1205 15:44:36.934312 4840 scope.go:117] "RemoveContainer" containerID="19fec58c7de56c830b1e3b9bf140cd66e2b51b6d84312dab287aa2824190bc4a" Dec 05 15:44:36 crc kubenswrapper[4840]: E1205 15:44:36.934550 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19fec58c7de56c830b1e3b9bf140cd66e2b51b6d84312dab287aa2824190bc4a\": container with ID starting with 19fec58c7de56c830b1e3b9bf140cd66e2b51b6d84312dab287aa2824190bc4a not found: ID does not exist" containerID="19fec58c7de56c830b1e3b9bf140cd66e2b51b6d84312dab287aa2824190bc4a" Dec 05 15:44:36 crc kubenswrapper[4840]: I1205 15:44:36.934579 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19fec58c7de56c830b1e3b9bf140cd66e2b51b6d84312dab287aa2824190bc4a"} err="failed to get container status \"19fec58c7de56c830b1e3b9bf140cd66e2b51b6d84312dab287aa2824190bc4a\": rpc error: code = NotFound desc = could not find container \"19fec58c7de56c830b1e3b9bf140cd66e2b51b6d84312dab287aa2824190bc4a\": container with ID starting with 19fec58c7de56c830b1e3b9bf140cd66e2b51b6d84312dab287aa2824190bc4a not found: ID does not exist" Dec 05 15:44:38 crc kubenswrapper[4840]: I1205 15:44:38.077648 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77ad16ff-7dea-4194-b756-8f328c53bc6d" path="/var/lib/kubelet/pods/77ad16ff-7dea-4194-b756-8f328c53bc6d/volumes" Dec 05 15:44:49 crc kubenswrapper[4840]: I1205 15:44:49.472079 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:44:49 crc kubenswrapper[4840]: I1205 15:44:49.472645 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:44:49 crc kubenswrapper[4840]: I1205 15:44:49.472691 4840 kubelet.go:2542] "SyncLoop (probe)" 
probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" Dec 05 15:44:49 crc kubenswrapper[4840]: I1205 15:44:49.473478 4840 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5e57f28200b123d8625a5f66153ca84da704d74d1056a91e0747e528997298e9"} pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 15:44:49 crc kubenswrapper[4840]: I1205 15:44:49.473537 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" containerID="cri-o://5e57f28200b123d8625a5f66153ca84da704d74d1056a91e0747e528997298e9" gracePeriod=600 Dec 05 15:44:49 crc kubenswrapper[4840]: I1205 15:44:49.933453 4840 generic.go:334] "Generic (PLEG): container finished" podID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerID="5e57f28200b123d8625a5f66153ca84da704d74d1056a91e0747e528997298e9" exitCode=0 Dec 05 15:44:49 crc kubenswrapper[4840]: I1205 15:44:49.933517 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerDied","Data":"5e57f28200b123d8625a5f66153ca84da704d74d1056a91e0747e528997298e9"} Dec 05 15:44:49 crc kubenswrapper[4840]: I1205 15:44:49.933735 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerStarted","Data":"2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d"} Dec 05 15:44:49 crc kubenswrapper[4840]: I1205 15:44:49.933753 4840 scope.go:117] "RemoveContainer" containerID="9c77d0f6942376cb418de3117648c3450135c206b22382f846064c68b2d1883c" Dec 05 15:44:50 crc kubenswrapper[4840]: I1205 15:44:50.945225 4840 generic.go:334] "Generic (PLEG): container finished" podID="d51bc3d7-3ce5-4967-ba22-71cef47d25d1" containerID="f8c5296ca36215ace6c2b01aca1ba9b15dd417db57b4c820e54e91ce887b7ead" exitCode=0 Dec 05 15:44:50 crc kubenswrapper[4840]: I1205 15:44:50.945329 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" event={"ID":"d51bc3d7-3ce5-4967-ba22-71cef47d25d1","Type":"ContainerDied","Data":"f8c5296ca36215ace6c2b01aca1ba9b15dd417db57b4c820e54e91ce887b7ead"} Dec 05 15:44:52 crc kubenswrapper[4840]: I1205 15:44:52.401712 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:44:52 crc kubenswrapper[4840]: I1205 15:44:52.469542 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-inventory\") pod \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " Dec 05 15:44:52 crc kubenswrapper[4840]: I1205 15:44:52.469600 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ceilometer-compute-config-data-2\") pod \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " Dec 05 15:44:52 crc kubenswrapper[4840]: I1205 15:44:52.469663 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ssh-key\") pod \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " Dec 05 15:44:52 crc kubenswrapper[4840]: I1205 15:44:52.469698 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ceilometer-compute-config-data-0\") pod \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " Dec 05 15:44:52 crc kubenswrapper[4840]: I1205 15:44:52.469729 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5m46\" (UniqueName: \"kubernetes.io/projected/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-kube-api-access-j5m46\") pod \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " Dec 05 15:44:52 crc kubenswrapper[4840]: I1205 15:44:52.469764 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-telemetry-combined-ca-bundle\") pod \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " Dec 05 15:44:52 crc kubenswrapper[4840]: I1205 15:44:52.469812 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ceilometer-compute-config-data-1\") pod \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " Dec 05 15:44:52 crc kubenswrapper[4840]: I1205 15:44:52.476006 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-kube-api-access-j5m46" (OuterVolumeSpecName: "kube-api-access-j5m46") pod "d51bc3d7-3ce5-4967-ba22-71cef47d25d1" (UID: "d51bc3d7-3ce5-4967-ba22-71cef47d25d1"). InnerVolumeSpecName "kube-api-access-j5m46". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:44:52 crc kubenswrapper[4840]: I1205 15:44:52.477508 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "d51bc3d7-3ce5-4967-ba22-71cef47d25d1" (UID: "d51bc3d7-3ce5-4967-ba22-71cef47d25d1"). 
InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:44:52 crc kubenswrapper[4840]: I1205 15:44:52.502349 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "d51bc3d7-3ce5-4967-ba22-71cef47d25d1" (UID: "d51bc3d7-3ce5-4967-ba22-71cef47d25d1"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:44:52 crc kubenswrapper[4840]: I1205 15:44:52.504631 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "d51bc3d7-3ce5-4967-ba22-71cef47d25d1" (UID: "d51bc3d7-3ce5-4967-ba22-71cef47d25d1"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:44:52 crc kubenswrapper[4840]: E1205 15:44:52.506112 4840 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ssh-key podName:d51bc3d7-3ce5-4967-ba22-71cef47d25d1 nodeName:}" failed. No retries permitted until 2025-12-05 15:44:53.006090015 +0000 UTC m=+2771.347152629 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ssh-key" (UniqueName: "kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ssh-key") pod "d51bc3d7-3ce5-4967-ba22-71cef47d25d1" (UID: "d51bc3d7-3ce5-4967-ba22-71cef47d25d1") : error deleting /var/lib/kubelet/pods/d51bc3d7-3ce5-4967-ba22-71cef47d25d1/volume-subpaths: remove /var/lib/kubelet/pods/d51bc3d7-3ce5-4967-ba22-71cef47d25d1/volume-subpaths: no such file or directory Dec 05 15:44:52 crc kubenswrapper[4840]: I1205 15:44:52.505996 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "d51bc3d7-3ce5-4967-ba22-71cef47d25d1" (UID: "d51bc3d7-3ce5-4967-ba22-71cef47d25d1"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:44:52 crc kubenswrapper[4840]: I1205 15:44:52.509042 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-inventory" (OuterVolumeSpecName: "inventory") pod "d51bc3d7-3ce5-4967-ba22-71cef47d25d1" (UID: "d51bc3d7-3ce5-4967-ba22-71cef47d25d1"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:44:52 crc kubenswrapper[4840]: I1205 15:44:52.572667 4840 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 05 15:44:52 crc kubenswrapper[4840]: I1205 15:44:52.572708 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5m46\" (UniqueName: \"kubernetes.io/projected/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-kube-api-access-j5m46\") on node \"crc\" DevicePath \"\"" Dec 05 15:44:52 crc kubenswrapper[4840]: I1205 15:44:52.572722 4840 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 15:44:52 crc kubenswrapper[4840]: I1205 15:44:52.572735 4840 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 05 15:44:52 crc kubenswrapper[4840]: I1205 15:44:52.572747 4840 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 15:44:52 crc kubenswrapper[4840]: I1205 15:44:52.572761 4840 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 05 15:44:52 crc kubenswrapper[4840]: I1205 15:44:52.977601 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" Dec 05 15:44:52 crc kubenswrapper[4840]: I1205 15:44:52.978307 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-76pjd" event={"ID":"d51bc3d7-3ce5-4967-ba22-71cef47d25d1","Type":"ContainerDied","Data":"45c8e77f694cedd55202acf9c3889557beec79c5be2c09a561e1a5036bf68fa0"} Dec 05 15:44:52 crc kubenswrapper[4840]: I1205 15:44:52.978341 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="45c8e77f694cedd55202acf9c3889557beec79c5be2c09a561e1a5036bf68fa0" Dec 05 15:44:53 crc kubenswrapper[4840]: I1205 15:44:53.083240 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ssh-key\") pod \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\" (UID: \"d51bc3d7-3ce5-4967-ba22-71cef47d25d1\") " Dec 05 15:44:53 crc kubenswrapper[4840]: I1205 15:44:53.087790 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d51bc3d7-3ce5-4967-ba22-71cef47d25d1" (UID: "d51bc3d7-3ce5-4967-ba22-71cef47d25d1"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:44:53 crc kubenswrapper[4840]: I1205 15:44:53.185192 4840 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d51bc3d7-3ce5-4967-ba22-71cef47d25d1-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 15:45:00 crc kubenswrapper[4840]: I1205 15:45:00.152729 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415825-l68db"] Dec 05 15:45:00 crc kubenswrapper[4840]: E1205 15:45:00.153758 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d51bc3d7-3ce5-4967-ba22-71cef47d25d1" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 15:45:00 crc kubenswrapper[4840]: I1205 15:45:00.153782 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="d51bc3d7-3ce5-4967-ba22-71cef47d25d1" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 15:45:00 crc kubenswrapper[4840]: E1205 15:45:00.153815 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77ad16ff-7dea-4194-b756-8f328c53bc6d" containerName="registry-server" Dec 05 15:45:00 crc kubenswrapper[4840]: I1205 15:45:00.153821 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="77ad16ff-7dea-4194-b756-8f328c53bc6d" containerName="registry-server" Dec 05 15:45:00 crc kubenswrapper[4840]: E1205 15:45:00.153833 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77ad16ff-7dea-4194-b756-8f328c53bc6d" containerName="extract-utilities" Dec 05 15:45:00 crc kubenswrapper[4840]: I1205 15:45:00.153839 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="77ad16ff-7dea-4194-b756-8f328c53bc6d" containerName="extract-utilities" Dec 05 15:45:00 crc kubenswrapper[4840]: E1205 15:45:00.153857 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77ad16ff-7dea-4194-b756-8f328c53bc6d" containerName="extract-content" Dec 05 15:45:00 crc kubenswrapper[4840]: I1205 15:45:00.153889 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="77ad16ff-7dea-4194-b756-8f328c53bc6d" containerName="extract-content" Dec 05 15:45:00 crc kubenswrapper[4840]: I1205 15:45:00.154148 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="77ad16ff-7dea-4194-b756-8f328c53bc6d" containerName="registry-server" Dec 05 15:45:00 crc kubenswrapper[4840]: I1205 15:45:00.154169 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="d51bc3d7-3ce5-4967-ba22-71cef47d25d1" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 15:45:00 crc kubenswrapper[4840]: I1205 15:45:00.154833 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415825-l68db" Dec 05 15:45:00 crc kubenswrapper[4840]: I1205 15:45:00.157030 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 15:45:00 crc kubenswrapper[4840]: I1205 15:45:00.157415 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 15:45:00 crc kubenswrapper[4840]: I1205 15:45:00.166353 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415825-l68db"] Dec 05 15:45:00 crc kubenswrapper[4840]: I1205 15:45:00.206087 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zd7ml\" (UniqueName: \"kubernetes.io/projected/433bbdcb-77bf-4722-a217-e6cfc5de16bf-kube-api-access-zd7ml\") pod \"collect-profiles-29415825-l68db\" (UID: \"433bbdcb-77bf-4722-a217-e6cfc5de16bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415825-l68db" Dec 05 15:45:00 crc kubenswrapper[4840]: I1205 15:45:00.206141 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/433bbdcb-77bf-4722-a217-e6cfc5de16bf-secret-volume\") pod \"collect-profiles-29415825-l68db\" (UID: \"433bbdcb-77bf-4722-a217-e6cfc5de16bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415825-l68db" Dec 05 15:45:00 crc kubenswrapper[4840]: I1205 15:45:00.206248 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/433bbdcb-77bf-4722-a217-e6cfc5de16bf-config-volume\") pod \"collect-profiles-29415825-l68db\" (UID: \"433bbdcb-77bf-4722-a217-e6cfc5de16bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415825-l68db" Dec 05 15:45:00 crc kubenswrapper[4840]: I1205 15:45:00.308029 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zd7ml\" (UniqueName: \"kubernetes.io/projected/433bbdcb-77bf-4722-a217-e6cfc5de16bf-kube-api-access-zd7ml\") pod \"collect-profiles-29415825-l68db\" (UID: \"433bbdcb-77bf-4722-a217-e6cfc5de16bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415825-l68db" Dec 05 15:45:00 crc kubenswrapper[4840]: I1205 15:45:00.308659 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/433bbdcb-77bf-4722-a217-e6cfc5de16bf-secret-volume\") pod \"collect-profiles-29415825-l68db\" (UID: \"433bbdcb-77bf-4722-a217-e6cfc5de16bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415825-l68db" Dec 05 15:45:00 crc kubenswrapper[4840]: I1205 15:45:00.310116 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/433bbdcb-77bf-4722-a217-e6cfc5de16bf-config-volume\") pod \"collect-profiles-29415825-l68db\" (UID: \"433bbdcb-77bf-4722-a217-e6cfc5de16bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415825-l68db" Dec 05 15:45:00 crc kubenswrapper[4840]: I1205 15:45:00.311209 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/433bbdcb-77bf-4722-a217-e6cfc5de16bf-config-volume\") pod 
\"collect-profiles-29415825-l68db\" (UID: \"433bbdcb-77bf-4722-a217-e6cfc5de16bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415825-l68db" Dec 05 15:45:00 crc kubenswrapper[4840]: I1205 15:45:00.315994 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/433bbdcb-77bf-4722-a217-e6cfc5de16bf-secret-volume\") pod \"collect-profiles-29415825-l68db\" (UID: \"433bbdcb-77bf-4722-a217-e6cfc5de16bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415825-l68db" Dec 05 15:45:00 crc kubenswrapper[4840]: I1205 15:45:00.333415 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zd7ml\" (UniqueName: \"kubernetes.io/projected/433bbdcb-77bf-4722-a217-e6cfc5de16bf-kube-api-access-zd7ml\") pod \"collect-profiles-29415825-l68db\" (UID: \"433bbdcb-77bf-4722-a217-e6cfc5de16bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415825-l68db" Dec 05 15:45:00 crc kubenswrapper[4840]: I1205 15:45:00.478781 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415825-l68db" Dec 05 15:45:00 crc kubenswrapper[4840]: I1205 15:45:00.918421 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415825-l68db"] Dec 05 15:45:01 crc kubenswrapper[4840]: I1205 15:45:01.060537 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415825-l68db" event={"ID":"433bbdcb-77bf-4722-a217-e6cfc5de16bf","Type":"ContainerStarted","Data":"f9a5a2b85c106f19c40573f988ed2c3e9c28f48e2679d648a888b75f885310ff"} Dec 05 15:45:02 crc kubenswrapper[4840]: I1205 15:45:02.069806 4840 generic.go:334] "Generic (PLEG): container finished" podID="433bbdcb-77bf-4722-a217-e6cfc5de16bf" containerID="d17f2a043f1f1075817910fc5746eb2b34bfeb4a8759f075ba7efd9909436c1f" exitCode=0 Dec 05 15:45:02 crc kubenswrapper[4840]: I1205 15:45:02.082321 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415825-l68db" event={"ID":"433bbdcb-77bf-4722-a217-e6cfc5de16bf","Type":"ContainerDied","Data":"d17f2a043f1f1075817910fc5746eb2b34bfeb4a8759f075ba7efd9909436c1f"} Dec 05 15:45:03 crc kubenswrapper[4840]: I1205 15:45:03.497096 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415825-l68db" Dec 05 15:45:03 crc kubenswrapper[4840]: I1205 15:45:03.666107 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/433bbdcb-77bf-4722-a217-e6cfc5de16bf-config-volume\") pod \"433bbdcb-77bf-4722-a217-e6cfc5de16bf\" (UID: \"433bbdcb-77bf-4722-a217-e6cfc5de16bf\") " Dec 05 15:45:03 crc kubenswrapper[4840]: I1205 15:45:03.666203 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zd7ml\" (UniqueName: \"kubernetes.io/projected/433bbdcb-77bf-4722-a217-e6cfc5de16bf-kube-api-access-zd7ml\") pod \"433bbdcb-77bf-4722-a217-e6cfc5de16bf\" (UID: \"433bbdcb-77bf-4722-a217-e6cfc5de16bf\") " Dec 05 15:45:03 crc kubenswrapper[4840]: I1205 15:45:03.666337 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/433bbdcb-77bf-4722-a217-e6cfc5de16bf-secret-volume\") pod \"433bbdcb-77bf-4722-a217-e6cfc5de16bf\" (UID: \"433bbdcb-77bf-4722-a217-e6cfc5de16bf\") " Dec 05 15:45:03 crc kubenswrapper[4840]: I1205 15:45:03.666992 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/433bbdcb-77bf-4722-a217-e6cfc5de16bf-config-volume" (OuterVolumeSpecName: "config-volume") pod "433bbdcb-77bf-4722-a217-e6cfc5de16bf" (UID: "433bbdcb-77bf-4722-a217-e6cfc5de16bf"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:45:03 crc kubenswrapper[4840]: I1205 15:45:03.672823 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/433bbdcb-77bf-4722-a217-e6cfc5de16bf-kube-api-access-zd7ml" (OuterVolumeSpecName: "kube-api-access-zd7ml") pod "433bbdcb-77bf-4722-a217-e6cfc5de16bf" (UID: "433bbdcb-77bf-4722-a217-e6cfc5de16bf"). InnerVolumeSpecName "kube-api-access-zd7ml". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:45:03 crc kubenswrapper[4840]: I1205 15:45:03.672984 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/433bbdcb-77bf-4722-a217-e6cfc5de16bf-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "433bbdcb-77bf-4722-a217-e6cfc5de16bf" (UID: "433bbdcb-77bf-4722-a217-e6cfc5de16bf"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:45:03 crc kubenswrapper[4840]: I1205 15:45:03.768672 4840 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/433bbdcb-77bf-4722-a217-e6cfc5de16bf-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 15:45:03 crc kubenswrapper[4840]: I1205 15:45:03.768716 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zd7ml\" (UniqueName: \"kubernetes.io/projected/433bbdcb-77bf-4722-a217-e6cfc5de16bf-kube-api-access-zd7ml\") on node \"crc\" DevicePath \"\"" Dec 05 15:45:03 crc kubenswrapper[4840]: I1205 15:45:03.768728 4840 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/433bbdcb-77bf-4722-a217-e6cfc5de16bf-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 15:45:04 crc kubenswrapper[4840]: I1205 15:45:04.089362 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415825-l68db" event={"ID":"433bbdcb-77bf-4722-a217-e6cfc5de16bf","Type":"ContainerDied","Data":"f9a5a2b85c106f19c40573f988ed2c3e9c28f48e2679d648a888b75f885310ff"} Dec 05 15:45:04 crc kubenswrapper[4840]: I1205 15:45:04.089409 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f9a5a2b85c106f19c40573f988ed2c3e9c28f48e2679d648a888b75f885310ff" Dec 05 15:45:04 crc kubenswrapper[4840]: I1205 15:45:04.089453 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415825-l68db" Dec 05 15:45:04 crc kubenswrapper[4840]: I1205 15:45:04.585441 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415780-k44mq"] Dec 05 15:45:04 crc kubenswrapper[4840]: I1205 15:45:04.594195 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415780-k44mq"] Dec 05 15:45:06 crc kubenswrapper[4840]: I1205 15:45:06.079058 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70be4dce-113d-4659-b5dc-55c3c724de12" path="/var/lib/kubelet/pods/70be4dce-113d-4659-b5dc-55c3c724de12/volumes" Dec 05 15:45:19 crc kubenswrapper[4840]: I1205 15:45:19.055401 4840 scope.go:117] "RemoveContainer" containerID="8e45db5dc85e7c50bcd98a93e6e42a6c3aa7b086f5e71f52eee6410338bae161" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.380653 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 15:45:49 crc kubenswrapper[4840]: E1205 15:45:49.381795 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="433bbdcb-77bf-4722-a217-e6cfc5de16bf" containerName="collect-profiles" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.381815 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="433bbdcb-77bf-4722-a217-e6cfc5de16bf" containerName="collect-profiles" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.382082 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="433bbdcb-77bf-4722-a217-e6cfc5de16bf" containerName="collect-profiles" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.382938 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.390657 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.391381 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.391394 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-mgrdk" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.393227 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d2e8a783-170e-44cb-a505-1ee2a96572af-config-data\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.393286 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d2e8a783-170e-44cb-a505-1ee2a96572af-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.393331 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d2e8a783-170e-44cb-a505-1ee2a96572af-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.400030 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.411779 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.494728 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/d2e8a783-170e-44cb-a505-1ee2a96572af-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.494923 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4d9r6\" (UniqueName: \"kubernetes.io/projected/d2e8a783-170e-44cb-a505-1ee2a96572af-kube-api-access-4d9r6\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.495030 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.495282 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d2e8a783-170e-44cb-a505-1ee2a96572af-config-data\") pod 
\"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.495648 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d2e8a783-170e-44cb-a505-1ee2a96572af-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.495714 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/d2e8a783-170e-44cb-a505-1ee2a96572af-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.495755 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d2e8a783-170e-44cb-a505-1ee2a96572af-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.495849 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/d2e8a783-170e-44cb-a505-1ee2a96572af-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.495963 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2e8a783-170e-44cb-a505-1ee2a96572af-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.496782 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d2e8a783-170e-44cb-a505-1ee2a96572af-config-data\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.496983 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d2e8a783-170e-44cb-a505-1ee2a96572af-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.503288 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d2e8a783-170e-44cb-a505-1ee2a96572af-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.597442 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4d9r6\" (UniqueName: \"kubernetes.io/projected/d2e8a783-170e-44cb-a505-1ee2a96572af-kube-api-access-4d9r6\") pod \"tempest-tests-tempest\" (UID: 
\"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.597526 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.597633 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/d2e8a783-170e-44cb-a505-1ee2a96572af-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.597712 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/d2e8a783-170e-44cb-a505-1ee2a96572af-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.597760 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2e8a783-170e-44cb-a505-1ee2a96572af-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.597837 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/d2e8a783-170e-44cb-a505-1ee2a96572af-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.598266 4840 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.598489 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/d2e8a783-170e-44cb-a505-1ee2a96572af-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.598607 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/d2e8a783-170e-44cb-a505-1ee2a96572af-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.601803 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2e8a783-170e-44cb-a505-1ee2a96572af-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc 
kubenswrapper[4840]: I1205 15:45:49.602004 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/d2e8a783-170e-44cb-a505-1ee2a96572af-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.614337 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4d9r6\" (UniqueName: \"kubernetes.io/projected/d2e8a783-170e-44cb-a505-1ee2a96572af-kube-api-access-4d9r6\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.630553 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " pod="openstack/tempest-tests-tempest" Dec 05 15:45:49 crc kubenswrapper[4840]: I1205 15:45:49.730676 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 15:45:50 crc kubenswrapper[4840]: I1205 15:45:50.215708 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 15:45:50 crc kubenswrapper[4840]: I1205 15:45:50.530714 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"d2e8a783-170e-44cb-a505-1ee2a96572af","Type":"ContainerStarted","Data":"47bf4458996aaad1f36d2747ef6d3696ce322a8ec8c4bb9c2e925df5ac892bd2"} Dec 05 15:46:03 crc kubenswrapper[4840]: I1205 15:46:03.748352 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-trwn2"] Dec 05 15:46:03 crc kubenswrapper[4840]: I1205 15:46:03.750885 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-trwn2"
Dec 05 15:46:03 crc kubenswrapper[4840]: I1205 15:46:03.791605 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-trwn2"]
Dec 05 15:46:03 crc kubenswrapper[4840]: I1205 15:46:03.844468 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ab13447-14c1-4ec7-9937-3bda5ac9099e-utilities\") pod \"certified-operators-trwn2\" (UID: \"4ab13447-14c1-4ec7-9937-3bda5ac9099e\") " pod="openshift-marketplace/certified-operators-trwn2"
Dec 05 15:46:03 crc kubenswrapper[4840]: I1205 15:46:03.844532 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ab13447-14c1-4ec7-9937-3bda5ac9099e-catalog-content\") pod \"certified-operators-trwn2\" (UID: \"4ab13447-14c1-4ec7-9937-3bda5ac9099e\") " pod="openshift-marketplace/certified-operators-trwn2"
Dec 05 15:46:03 crc kubenswrapper[4840]: I1205 15:46:03.844624 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vf4zp\" (UniqueName: \"kubernetes.io/projected/4ab13447-14c1-4ec7-9937-3bda5ac9099e-kube-api-access-vf4zp\") pod \"certified-operators-trwn2\" (UID: \"4ab13447-14c1-4ec7-9937-3bda5ac9099e\") " pod="openshift-marketplace/certified-operators-trwn2"
Dec 05 15:46:03 crc kubenswrapper[4840]: I1205 15:46:03.946076 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ab13447-14c1-4ec7-9937-3bda5ac9099e-utilities\") pod \"certified-operators-trwn2\" (UID: \"4ab13447-14c1-4ec7-9937-3bda5ac9099e\") " pod="openshift-marketplace/certified-operators-trwn2"
Dec 05 15:46:03 crc kubenswrapper[4840]: I1205 15:46:03.946174 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ab13447-14c1-4ec7-9937-3bda5ac9099e-catalog-content\") pod \"certified-operators-trwn2\" (UID: \"4ab13447-14c1-4ec7-9937-3bda5ac9099e\") " pod="openshift-marketplace/certified-operators-trwn2"
Dec 05 15:46:03 crc kubenswrapper[4840]: I1205 15:46:03.946288 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vf4zp\" (UniqueName: \"kubernetes.io/projected/4ab13447-14c1-4ec7-9937-3bda5ac9099e-kube-api-access-vf4zp\") pod \"certified-operators-trwn2\" (UID: \"4ab13447-14c1-4ec7-9937-3bda5ac9099e\") " pod="openshift-marketplace/certified-operators-trwn2"
Dec 05 15:46:03 crc kubenswrapper[4840]: I1205 15:46:03.946657 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ab13447-14c1-4ec7-9937-3bda5ac9099e-utilities\") pod \"certified-operators-trwn2\" (UID: \"4ab13447-14c1-4ec7-9937-3bda5ac9099e\") " pod="openshift-marketplace/certified-operators-trwn2"
Dec 05 15:46:03 crc kubenswrapper[4840]: I1205 15:46:03.946760 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ab13447-14c1-4ec7-9937-3bda5ac9099e-catalog-content\") pod \"certified-operators-trwn2\" (UID: \"4ab13447-14c1-4ec7-9937-3bda5ac9099e\") " pod="openshift-marketplace/certified-operators-trwn2"
Dec 05 15:46:03 crc kubenswrapper[4840]: I1205 15:46:03.987844 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vf4zp\" (UniqueName: \"kubernetes.io/projected/4ab13447-14c1-4ec7-9937-3bda5ac9099e-kube-api-access-vf4zp\") pod \"certified-operators-trwn2\" (UID: \"4ab13447-14c1-4ec7-9937-3bda5ac9099e\") " pod="openshift-marketplace/certified-operators-trwn2"
Dec 05 15:46:04 crc kubenswrapper[4840]: I1205 15:46:04.089569 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-trwn2"
Dec 05 15:46:17 crc kubenswrapper[4840]: I1205 15:46:17.067712 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-nbj9s"]
Dec 05 15:46:17 crc kubenswrapper[4840]: I1205 15:46:17.072915 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nbj9s"
Dec 05 15:46:17 crc kubenswrapper[4840]: I1205 15:46:17.085735 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nbj9s"]
Dec 05 15:46:17 crc kubenswrapper[4840]: I1205 15:46:17.194793 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/916556b2-0a55-4c5a-8d5a-3e42bdeaec95-utilities\") pod \"community-operators-nbj9s\" (UID: \"916556b2-0a55-4c5a-8d5a-3e42bdeaec95\") " pod="openshift-marketplace/community-operators-nbj9s"
Dec 05 15:46:17 crc kubenswrapper[4840]: I1205 15:46:17.194853 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jhtc\" (UniqueName: \"kubernetes.io/projected/916556b2-0a55-4c5a-8d5a-3e42bdeaec95-kube-api-access-5jhtc\") pod \"community-operators-nbj9s\" (UID: \"916556b2-0a55-4c5a-8d5a-3e42bdeaec95\") " pod="openshift-marketplace/community-operators-nbj9s"
Dec 05 15:46:17 crc kubenswrapper[4840]: I1205 15:46:17.194965 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/916556b2-0a55-4c5a-8d5a-3e42bdeaec95-catalog-content\") pod \"community-operators-nbj9s\" (UID: \"916556b2-0a55-4c5a-8d5a-3e42bdeaec95\") " pod="openshift-marketplace/community-operators-nbj9s"
Dec 05 15:46:17 crc kubenswrapper[4840]: I1205 15:46:17.298594 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/916556b2-0a55-4c5a-8d5a-3e42bdeaec95-catalog-content\") pod \"community-operators-nbj9s\" (UID: \"916556b2-0a55-4c5a-8d5a-3e42bdeaec95\") " pod="openshift-marketplace/community-operators-nbj9s"
Dec 05 15:46:17 crc kubenswrapper[4840]: I1205 15:46:17.298739 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/916556b2-0a55-4c5a-8d5a-3e42bdeaec95-utilities\") pod \"community-operators-nbj9s\" (UID: \"916556b2-0a55-4c5a-8d5a-3e42bdeaec95\") " pod="openshift-marketplace/community-operators-nbj9s"
Dec 05 15:46:17 crc kubenswrapper[4840]: I1205 15:46:17.298786 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jhtc\" (UniqueName: \"kubernetes.io/projected/916556b2-0a55-4c5a-8d5a-3e42bdeaec95-kube-api-access-5jhtc\") pod \"community-operators-nbj9s\" (UID: \"916556b2-0a55-4c5a-8d5a-3e42bdeaec95\") " pod="openshift-marketplace/community-operators-nbj9s"
Dec 05 15:46:17 crc kubenswrapper[4840]: I1205 15:46:17.299784 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/916556b2-0a55-4c5a-8d5a-3e42bdeaec95-catalog-content\") pod \"community-operators-nbj9s\" (UID: \"916556b2-0a55-4c5a-8d5a-3e42bdeaec95\") " pod="openshift-marketplace/community-operators-nbj9s"
Dec 05 15:46:17 crc kubenswrapper[4840]: I1205 15:46:17.300170 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/916556b2-0a55-4c5a-8d5a-3e42bdeaec95-utilities\") pod \"community-operators-nbj9s\" (UID: \"916556b2-0a55-4c5a-8d5a-3e42bdeaec95\") " pod="openshift-marketplace/community-operators-nbj9s"
Dec 05 15:46:17 crc kubenswrapper[4840]: I1205 15:46:17.335526 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jhtc\" (UniqueName: \"kubernetes.io/projected/916556b2-0a55-4c5a-8d5a-3e42bdeaec95-kube-api-access-5jhtc\") pod \"community-operators-nbj9s\" (UID: \"916556b2-0a55-4c5a-8d5a-3e42bdeaec95\") " pod="openshift-marketplace/community-operators-nbj9s"
Dec 05 15:46:17 crc kubenswrapper[4840]: I1205 15:46:17.406549 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nbj9s"
Dec 05 15:46:22 crc kubenswrapper[4840]: E1205 15:46:22.645243 4840 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified"
Dec 05 15:46:22 crc kubenswrapper[4840]: E1205 15:46:22.646338 4840 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4d9r6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(d2e8a783-170e-44cb-a505-1ee2a96572af): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 15:46:22 crc kubenswrapper[4840]: E1205 15:46:22.647531 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="d2e8a783-170e-44cb-a505-1ee2a96572af"
Dec 05 15:46:22 crc kubenswrapper[4840]: E1205 15:46:22.910427 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="d2e8a783-170e-44cb-a505-1ee2a96572af"
Dec 05 15:46:22 crc kubenswrapper[4840]: I1205 15:46:22.918132 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-trwn2"]
Dec 05 15:46:23 crc kubenswrapper[4840]: I1205 15:46:23.042826 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nbj9s"]
Dec 05 15:46:23 crc kubenswrapper[4840]: W1205 15:46:23.046597 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod916556b2_0a55_4c5a_8d5a_3e42bdeaec95.slice/crio-5838616e08c8e845807f5fcff4d0cf7d40e81582b59a51ed1fc8cf1ee8d5202b WatchSource:0}: Error finding container 5838616e08c8e845807f5fcff4d0cf7d40e81582b59a51ed1fc8cf1ee8d5202b: Status 404 returned error can't find the container with id 5838616e08c8e845807f5fcff4d0cf7d40e81582b59a51ed1fc8cf1ee8d5202b
Dec 05 15:46:23 crc kubenswrapper[4840]: I1205 15:46:23.919839 4840 generic.go:334] "Generic (PLEG): container finished" podID="4ab13447-14c1-4ec7-9937-3bda5ac9099e" containerID="0d335ddaaff82d4fc0dbf65de726423f09d790496d7094d2fd6ebb79f7ee1d60" exitCode=0
Dec 05 15:46:23 crc kubenswrapper[4840]: I1205 15:46:23.919915 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-trwn2" event={"ID":"4ab13447-14c1-4ec7-9937-3bda5ac9099e","Type":"ContainerDied","Data":"0d335ddaaff82d4fc0dbf65de726423f09d790496d7094d2fd6ebb79f7ee1d60"}
Dec 05 15:46:23 crc kubenswrapper[4840]: I1205 15:46:23.920206 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-trwn2" event={"ID":"4ab13447-14c1-4ec7-9937-3bda5ac9099e","Type":"ContainerStarted","Data":"b38856dfa4cb3d2462958b9d6edc67727b6f4d140c3e0298ea6092c7dd223022"}
Dec 05 15:46:23 crc kubenswrapper[4840]: I1205 15:46:23.921932 4840 generic.go:334] "Generic (PLEG): container finished" podID="916556b2-0a55-4c5a-8d5a-3e42bdeaec95" containerID="d5862e6b59c3e95f66679de09dc0c40a2c9b813545af5a365a2475cf13377671" exitCode=0
Dec 05 15:46:23 crc kubenswrapper[4840]: I1205 15:46:23.921982 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nbj9s" event={"ID":"916556b2-0a55-4c5a-8d5a-3e42bdeaec95","Type":"ContainerDied","Data":"d5862e6b59c3e95f66679de09dc0c40a2c9b813545af5a365a2475cf13377671"}
Dec 05 15:46:23 crc kubenswrapper[4840]: I1205 15:46:23.922012 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nbj9s" event={"ID":"916556b2-0a55-4c5a-8d5a-3e42bdeaec95","Type":"ContainerStarted","Data":"5838616e08c8e845807f5fcff4d0cf7d40e81582b59a51ed1fc8cf1ee8d5202b"}
Dec 05 15:46:27 crc kubenswrapper[4840]: I1205 15:46:27.961267 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nbj9s" event={"ID":"916556b2-0a55-4c5a-8d5a-3e42bdeaec95","Type":"ContainerStarted","Data":"fe376587a669ff8118d891474c1319edd939a3ab44e29e39d9def33a16793376"}
Dec 05 15:46:27 crc kubenswrapper[4840]: I1205 15:46:27.963368 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-trwn2" event={"ID":"4ab13447-14c1-4ec7-9937-3bda5ac9099e","Type":"ContainerStarted","Data":"6be9a75f7c2095f22f3f1fd5e939603a30bb1c69ee4170df5aa0c792903f177c"}
Dec 05 15:46:28 crc kubenswrapper[4840]: I1205 15:46:28.974636 4840 generic.go:334] "Generic (PLEG): container finished" podID="916556b2-0a55-4c5a-8d5a-3e42bdeaec95" containerID="fe376587a669ff8118d891474c1319edd939a3ab44e29e39d9def33a16793376" exitCode=0
Dec 05 15:46:28 crc kubenswrapper[4840]: I1205 15:46:28.974731 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nbj9s" event={"ID":"916556b2-0a55-4c5a-8d5a-3e42bdeaec95","Type":"ContainerDied","Data":"fe376587a669ff8118d891474c1319edd939a3ab44e29e39d9def33a16793376"}
Dec 05 15:46:28 crc kubenswrapper[4840]: I1205 15:46:28.978177 4840 generic.go:334] "Generic (PLEG): container finished" podID="4ab13447-14c1-4ec7-9937-3bda5ac9099e" containerID="6be9a75f7c2095f22f3f1fd5e939603a30bb1c69ee4170df5aa0c792903f177c" exitCode=0
Dec 05 15:46:28 crc kubenswrapper[4840]: I1205 15:46:28.978210 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-trwn2" event={"ID":"4ab13447-14c1-4ec7-9937-3bda5ac9099e","Type":"ContainerDied","Data":"6be9a75f7c2095f22f3f1fd5e939603a30bb1c69ee4170df5aa0c792903f177c"}
Dec 05 15:46:29 crc kubenswrapper[4840]: I1205 15:46:29.989195 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-trwn2" event={"ID":"4ab13447-14c1-4ec7-9937-3bda5ac9099e","Type":"ContainerStarted","Data":"9a0b7213b838828066644930fce24d0b6896f4a7199cd7553d5fc2bdc54667b6"}
Dec 05 15:46:29 crc kubenswrapper[4840]: I1205 15:46:29.996447 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nbj9s" event={"ID":"916556b2-0a55-4c5a-8d5a-3e42bdeaec95","Type":"ContainerStarted","Data":"586bb4b91ef61633352fb77cb89ac8ed1d1708d0757fb6d22e5fde7e3763b00c"}
Dec 05 15:46:30 crc kubenswrapper[4840]: I1205 15:46:30.024114 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-trwn2" podStartSLOduration=21.579094863999998 podStartE2EDuration="27.024095384s" podCreationTimestamp="2025-12-05 15:46:03 +0000 UTC" firstStartedPulling="2025-12-05 15:46:23.923857008 +0000 UTC m=+2862.264919622" lastFinishedPulling="2025-12-05 15:46:29.368857518 +0000 UTC m=+2867.709920142" observedRunningTime="2025-12-05 15:46:30.020987536 +0000 UTC m=+2868.362050150" watchObservedRunningTime="2025-12-05 15:46:30.024095384 +0000 UTC m=+2868.365157998"
Dec 05 15:46:30 crc kubenswrapper[4840]: I1205 15:46:30.043578 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-nbj9s" podStartSLOduration=7.49195047 podStartE2EDuration="13.043555849s" podCreationTimestamp="2025-12-05 15:46:17 +0000 UTC" firstStartedPulling="2025-12-05 15:46:23.923906009 +0000 UTC m=+2862.264968663" lastFinishedPulling="2025-12-05 15:46:29.475511388 +0000 UTC m=+2867.816574042" observedRunningTime="2025-12-05 15:46:30.039687069 +0000 UTC m=+2868.380749683" watchObservedRunningTime="2025-12-05 15:46:30.043555849 +0000 UTC m=+2868.384618463"
Dec 05 15:46:34 crc kubenswrapper[4840]: I1205 15:46:34.094418 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-trwn2"
Dec 05 15:46:34 crc kubenswrapper[4840]: I1205 15:46:34.095032 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-trwn2"
Dec 05 15:46:34 crc kubenswrapper[4840]: I1205 15:46:34.143526 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-trwn2"
Dec 05 15:46:35 crc kubenswrapper[4840]: I1205 15:46:35.149515 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-trwn2"
Dec 05 15:46:35 crc kubenswrapper[4840]: I1205 15:46:35.206192 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-trwn2"]
Dec 05 15:46:37 crc kubenswrapper[4840]: I1205 15:46:37.081049 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-trwn2" podUID="4ab13447-14c1-4ec7-9937-3bda5ac9099e" containerName="registry-server" containerID="cri-o://9a0b7213b838828066644930fce24d0b6896f4a7199cd7553d5fc2bdc54667b6" gracePeriod=2
Dec 05 15:46:37 crc kubenswrapper[4840]: I1205 15:46:37.082031 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"d2e8a783-170e-44cb-a505-1ee2a96572af","Type":"ContainerStarted","Data":"44f025ec8a947db012c1d772b880847564feb8da8ce741d29bd32729e61a5fd4"}
Dec 05 15:46:37 crc kubenswrapper[4840]: I1205 15:46:37.110078 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.755094453 podStartE2EDuration="49.11005234s" podCreationTimestamp="2025-12-05 15:45:48 +0000 UTC" firstStartedPulling="2025-12-05 15:45:50.221664042 +0000 UTC m=+2828.562726656" lastFinishedPulling="2025-12-05 15:46:35.576621929 +0000 UTC m=+2873.917684543" observedRunningTime="2025-12-05 15:46:37.100786557 +0000 UTC m=+2875.441849181" watchObservedRunningTime="2025-12-05 15:46:37.11005234 +0000 UTC m=+2875.451114954"
Dec 05 15:46:37 crc kubenswrapper[4840]: I1205 15:46:37.407645 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-nbj9s"
Dec 05 15:46:37 crc kubenswrapper[4840]: I1205 15:46:37.407724 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-nbj9s"
Dec 05 15:46:37 crc kubenswrapper[4840]: I1205 15:46:37.461883 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-nbj9s"
Dec 05 15:46:38 crc kubenswrapper[4840]: I1205 15:46:38.354856 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-trwn2"
Dec 05 15:46:38 crc kubenswrapper[4840]: I1205 15:46:38.395778 4840 generic.go:334] "Generic (PLEG): container finished" podID="4ab13447-14c1-4ec7-9937-3bda5ac9099e" containerID="9a0b7213b838828066644930fce24d0b6896f4a7199cd7553d5fc2bdc54667b6" exitCode=0
Dec 05 15:46:38 crc kubenswrapper[4840]: I1205 15:46:38.400778 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-trwn2"
Dec 05 15:46:38 crc kubenswrapper[4840]: I1205 15:46:38.412126 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-trwn2" event={"ID":"4ab13447-14c1-4ec7-9937-3bda5ac9099e","Type":"ContainerDied","Data":"9a0b7213b838828066644930fce24d0b6896f4a7199cd7553d5fc2bdc54667b6"}
Dec 05 15:46:38 crc kubenswrapper[4840]: I1205 15:46:38.412216 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-trwn2" event={"ID":"4ab13447-14c1-4ec7-9937-3bda5ac9099e","Type":"ContainerDied","Data":"b38856dfa4cb3d2462958b9d6edc67727b6f4d140c3e0298ea6092c7dd223022"}
Dec 05 15:46:38 crc kubenswrapper[4840]: I1205 15:46:38.412249 4840 scope.go:117] "RemoveContainer" containerID="9a0b7213b838828066644930fce24d0b6896f4a7199cd7553d5fc2bdc54667b6"
Dec 05 15:46:38 crc kubenswrapper[4840]: I1205 15:46:38.464574 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-nbj9s"
Dec 05 15:46:38 crc kubenswrapper[4840]: I1205 15:46:38.533586 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ab13447-14c1-4ec7-9937-3bda5ac9099e-catalog-content\") pod \"4ab13447-14c1-4ec7-9937-3bda5ac9099e\" (UID: \"4ab13447-14c1-4ec7-9937-3bda5ac9099e\") "
Dec 05 15:46:38 crc kubenswrapper[4840]: I1205 15:46:38.533665 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vf4zp\" (UniqueName: \"kubernetes.io/projected/4ab13447-14c1-4ec7-9937-3bda5ac9099e-kube-api-access-vf4zp\") pod \"4ab13447-14c1-4ec7-9937-3bda5ac9099e\" (UID: \"4ab13447-14c1-4ec7-9937-3bda5ac9099e\") "
Dec 05 15:46:38 crc kubenswrapper[4840]: I1205 15:46:38.533765 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ab13447-14c1-4ec7-9937-3bda5ac9099e-utilities\") pod \"4ab13447-14c1-4ec7-9937-3bda5ac9099e\" (UID: \"4ab13447-14c1-4ec7-9937-3bda5ac9099e\") "
Dec 05 15:46:38 crc kubenswrapper[4840]: I1205 15:46:38.534582 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ab13447-14c1-4ec7-9937-3bda5ac9099e-utilities" (OuterVolumeSpecName: "utilities") pod "4ab13447-14c1-4ec7-9937-3bda5ac9099e" (UID: "4ab13447-14c1-4ec7-9937-3bda5ac9099e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 15:46:38 crc kubenswrapper[4840]: I1205 15:46:38.535207 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ab13447-14c1-4ec7-9937-3bda5ac9099e-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 15:46:38 crc kubenswrapper[4840]: I1205 15:46:38.541698 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ab13447-14c1-4ec7-9937-3bda5ac9099e-kube-api-access-vf4zp" (OuterVolumeSpecName: "kube-api-access-vf4zp") pod "4ab13447-14c1-4ec7-9937-3bda5ac9099e" (UID: "4ab13447-14c1-4ec7-9937-3bda5ac9099e"). InnerVolumeSpecName "kube-api-access-vf4zp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 15:46:38 crc kubenswrapper[4840]: I1205 15:46:38.585086 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ab13447-14c1-4ec7-9937-3bda5ac9099e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4ab13447-14c1-4ec7-9937-3bda5ac9099e" (UID: "4ab13447-14c1-4ec7-9937-3bda5ac9099e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 15:46:38 crc kubenswrapper[4840]: I1205 15:46:38.637004 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ab13447-14c1-4ec7-9937-3bda5ac9099e-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 15:46:38 crc kubenswrapper[4840]: I1205 15:46:38.637064 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vf4zp\" (UniqueName: \"kubernetes.io/projected/4ab13447-14c1-4ec7-9937-3bda5ac9099e-kube-api-access-vf4zp\") on node \"crc\" DevicePath \"\""
Dec 05 15:46:38 crc kubenswrapper[4840]: I1205 15:46:38.742985 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-trwn2"]
Dec 05 15:46:38 crc kubenswrapper[4840]: I1205 15:46:38.751696 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-trwn2"]
Dec 05 15:46:38 crc kubenswrapper[4840]: I1205 15:46:38.992697 4840 scope.go:117] "RemoveContainer" containerID="6be9a75f7c2095f22f3f1fd5e939603a30bb1c69ee4170df5aa0c792903f177c"
Dec 05 15:46:39 crc kubenswrapper[4840]: I1205 15:46:39.017037 4840 scope.go:117] "RemoveContainer" containerID="0d335ddaaff82d4fc0dbf65de726423f09d790496d7094d2fd6ebb79f7ee1d60"
Dec 05 15:46:39 crc kubenswrapper[4840]: I1205 15:46:39.061470 4840 scope.go:117] "RemoveContainer" containerID="9a0b7213b838828066644930fce24d0b6896f4a7199cd7553d5fc2bdc54667b6"
Dec 05 15:46:39 crc kubenswrapper[4840]: E1205 15:46:39.062100 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a0b7213b838828066644930fce24d0b6896f4a7199cd7553d5fc2bdc54667b6\": container with ID starting with 9a0b7213b838828066644930fce24d0b6896f4a7199cd7553d5fc2bdc54667b6 not found: ID does not exist" containerID="9a0b7213b838828066644930fce24d0b6896f4a7199cd7553d5fc2bdc54667b6"
Dec 05 15:46:39 crc kubenswrapper[4840]: I1205 15:46:39.062142 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a0b7213b838828066644930fce24d0b6896f4a7199cd7553d5fc2bdc54667b6"} err="failed to get container status \"9a0b7213b838828066644930fce24d0b6896f4a7199cd7553d5fc2bdc54667b6\": rpc error: code = NotFound desc = could not find container \"9a0b7213b838828066644930fce24d0b6896f4a7199cd7553d5fc2bdc54667b6\": container with ID starting with 9a0b7213b838828066644930fce24d0b6896f4a7199cd7553d5fc2bdc54667b6 not found: ID does not exist"
Dec 05 15:46:39 crc kubenswrapper[4840]: I1205 15:46:39.062172 4840 scope.go:117] "RemoveContainer" containerID="6be9a75f7c2095f22f3f1fd5e939603a30bb1c69ee4170df5aa0c792903f177c"
Dec 05 15:46:39 crc kubenswrapper[4840]: E1205 15:46:39.062747 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6be9a75f7c2095f22f3f1fd5e939603a30bb1c69ee4170df5aa0c792903f177c\": container with ID starting with 6be9a75f7c2095f22f3f1fd5e939603a30bb1c69ee4170df5aa0c792903f177c not found: ID does not exist" containerID="6be9a75f7c2095f22f3f1fd5e939603a30bb1c69ee4170df5aa0c792903f177c"
Dec 05 15:46:39 crc kubenswrapper[4840]: I1205 15:46:39.062801 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6be9a75f7c2095f22f3f1fd5e939603a30bb1c69ee4170df5aa0c792903f177c"} err="failed to get container status \"6be9a75f7c2095f22f3f1fd5e939603a30bb1c69ee4170df5aa0c792903f177c\": rpc error: code = NotFound desc = could not find container \"6be9a75f7c2095f22f3f1fd5e939603a30bb1c69ee4170df5aa0c792903f177c\": container with ID starting with 6be9a75f7c2095f22f3f1fd5e939603a30bb1c69ee4170df5aa0c792903f177c not found: ID does not exist"
Dec 05 15:46:39 crc kubenswrapper[4840]: I1205 15:46:39.062846 4840 scope.go:117] "RemoveContainer" containerID="0d335ddaaff82d4fc0dbf65de726423f09d790496d7094d2fd6ebb79f7ee1d60"
Dec 05 15:46:39 crc kubenswrapper[4840]: E1205 15:46:39.063136 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d335ddaaff82d4fc0dbf65de726423f09d790496d7094d2fd6ebb79f7ee1d60\": container with ID starting with 0d335ddaaff82d4fc0dbf65de726423f09d790496d7094d2fd6ebb79f7ee1d60 not found: ID does not exist" containerID="0d335ddaaff82d4fc0dbf65de726423f09d790496d7094d2fd6ebb79f7ee1d60"
Dec 05 15:46:39 crc kubenswrapper[4840]: I1205 15:46:39.063166 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d335ddaaff82d4fc0dbf65de726423f09d790496d7094d2fd6ebb79f7ee1d60"} err="failed to get container status \"0d335ddaaff82d4fc0dbf65de726423f09d790496d7094d2fd6ebb79f7ee1d60\": rpc error: code = NotFound desc = could not find container \"0d335ddaaff82d4fc0dbf65de726423f09d790496d7094d2fd6ebb79f7ee1d60\": container with ID starting with 0d335ddaaff82d4fc0dbf65de726423f09d790496d7094d2fd6ebb79f7ee1d60 not found: ID does not exist"
Dec 05 15:46:40 crc kubenswrapper[4840]: I1205 15:46:40.020693 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nbj9s"]
Dec 05 15:46:40 crc kubenswrapper[4840]: I1205 15:46:40.078521 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ab13447-14c1-4ec7-9937-3bda5ac9099e" path="/var/lib/kubelet/pods/4ab13447-14c1-4ec7-9937-3bda5ac9099e/volumes"
Dec 05 15:46:40 crc kubenswrapper[4840]: I1205 15:46:40.400785 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cfp8t"]
Dec 05 15:46:40 crc kubenswrapper[4840]: I1205 15:46:40.401256 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cfp8t" podUID="bafc2f1e-3440-4f41-a248-dbc7d322249a" containerName="registry-server" containerID="cri-o://685b2d7ec1f73d8577aec023b80d6665ad6a3f995c98a8d5f24ef18c3af0db38" gracePeriod=2
Dec 05 15:46:40 crc kubenswrapper[4840]: I1205 15:46:40.943984 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cfp8t"
Dec 05 15:46:41 crc kubenswrapper[4840]: I1205 15:46:41.116594 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bafc2f1e-3440-4f41-a248-dbc7d322249a-utilities\") pod \"bafc2f1e-3440-4f41-a248-dbc7d322249a\" (UID: \"bafc2f1e-3440-4f41-a248-dbc7d322249a\") "
Dec 05 15:46:41 crc kubenswrapper[4840]: I1205 15:46:41.116768 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bafc2f1e-3440-4f41-a248-dbc7d322249a-catalog-content\") pod \"bafc2f1e-3440-4f41-a248-dbc7d322249a\" (UID: \"bafc2f1e-3440-4f41-a248-dbc7d322249a\") "
Dec 05 15:46:41 crc kubenswrapper[4840]: I1205 15:46:41.116811 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2vb2\" (UniqueName: \"kubernetes.io/projected/bafc2f1e-3440-4f41-a248-dbc7d322249a-kube-api-access-f2vb2\") pod \"bafc2f1e-3440-4f41-a248-dbc7d322249a\" (UID: \"bafc2f1e-3440-4f41-a248-dbc7d322249a\") "
Dec 05 15:46:41 crc kubenswrapper[4840]: I1205 15:46:41.117488 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bafc2f1e-3440-4f41-a248-dbc7d322249a-utilities" (OuterVolumeSpecName: "utilities") pod "bafc2f1e-3440-4f41-a248-dbc7d322249a" (UID: "bafc2f1e-3440-4f41-a248-dbc7d322249a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 15:46:41 crc kubenswrapper[4840]: I1205 15:46:41.118106 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bafc2f1e-3440-4f41-a248-dbc7d322249a-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 15:46:41 crc kubenswrapper[4840]: I1205 15:46:41.126984 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bafc2f1e-3440-4f41-a248-dbc7d322249a-kube-api-access-f2vb2" (OuterVolumeSpecName: "kube-api-access-f2vb2") pod "bafc2f1e-3440-4f41-a248-dbc7d322249a" (UID: "bafc2f1e-3440-4f41-a248-dbc7d322249a"). InnerVolumeSpecName "kube-api-access-f2vb2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 15:46:41 crc kubenswrapper[4840]: I1205 15:46:41.203053 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bafc2f1e-3440-4f41-a248-dbc7d322249a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bafc2f1e-3440-4f41-a248-dbc7d322249a" (UID: "bafc2f1e-3440-4f41-a248-dbc7d322249a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 15:46:41 crc kubenswrapper[4840]: I1205 15:46:41.220283 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bafc2f1e-3440-4f41-a248-dbc7d322249a-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 15:46:41 crc kubenswrapper[4840]: I1205 15:46:41.220322 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2vb2\" (UniqueName: \"kubernetes.io/projected/bafc2f1e-3440-4f41-a248-dbc7d322249a-kube-api-access-f2vb2\") on node \"crc\" DevicePath \"\""
Dec 05 15:46:41 crc kubenswrapper[4840]: I1205 15:46:41.444178 4840 generic.go:334] "Generic (PLEG): container finished" podID="bafc2f1e-3440-4f41-a248-dbc7d322249a" containerID="685b2d7ec1f73d8577aec023b80d6665ad6a3f995c98a8d5f24ef18c3af0db38" exitCode=0
Dec 05 15:46:41 crc kubenswrapper[4840]: I1205 15:46:41.444469 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cfp8t" event={"ID":"bafc2f1e-3440-4f41-a248-dbc7d322249a","Type":"ContainerDied","Data":"685b2d7ec1f73d8577aec023b80d6665ad6a3f995c98a8d5f24ef18c3af0db38"}
Dec 05 15:46:41 crc kubenswrapper[4840]: I1205 15:46:41.444498 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cfp8t" event={"ID":"bafc2f1e-3440-4f41-a248-dbc7d322249a","Type":"ContainerDied","Data":"6b2fc392771c158563f6517bf61262172dceca83cb740e51a28efdee29a0a78c"}
Dec 05 15:46:41 crc kubenswrapper[4840]: I1205 15:46:41.444515 4840 scope.go:117] "RemoveContainer" containerID="685b2d7ec1f73d8577aec023b80d6665ad6a3f995c98a8d5f24ef18c3af0db38"
Dec 05 15:46:41 crc kubenswrapper[4840]: I1205 15:46:41.444626 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cfp8t"
Dec 05 15:46:41 crc kubenswrapper[4840]: I1205 15:46:41.626841 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cfp8t"]
Dec 05 15:46:41 crc kubenswrapper[4840]: I1205 15:46:41.628406 4840 scope.go:117] "RemoveContainer" containerID="ee15c21f8faabfa652c302e409f46e00158ea9847056bc77ebaaa86eef0e4eae"
Dec 05 15:46:41 crc kubenswrapper[4840]: I1205 15:46:41.635381 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cfp8t"]
Dec 05 15:46:41 crc kubenswrapper[4840]: I1205 15:46:41.661733 4840 scope.go:117] "RemoveContainer" containerID="93556c8acb0109fd41b257c8d3199a7e1707ec37c4bc8ad7c5cf706920225130"
Dec 05 15:46:41 crc kubenswrapper[4840]: I1205 15:46:41.703168 4840 scope.go:117] "RemoveContainer" containerID="685b2d7ec1f73d8577aec023b80d6665ad6a3f995c98a8d5f24ef18c3af0db38"
Dec 05 15:46:41 crc kubenswrapper[4840]: E1205 15:46:41.703612 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"685b2d7ec1f73d8577aec023b80d6665ad6a3f995c98a8d5f24ef18c3af0db38\": container with ID starting with 685b2d7ec1f73d8577aec023b80d6665ad6a3f995c98a8d5f24ef18c3af0db38 not found: ID does not exist" containerID="685b2d7ec1f73d8577aec023b80d6665ad6a3f995c98a8d5f24ef18c3af0db38"
Dec 05 15:46:41 crc kubenswrapper[4840]: I1205 15:46:41.703682 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"685b2d7ec1f73d8577aec023b80d6665ad6a3f995c98a8d5f24ef18c3af0db38"} err="failed to get container status \"685b2d7ec1f73d8577aec023b80d6665ad6a3f995c98a8d5f24ef18c3af0db38\": rpc error: code = NotFound desc = could not find container \"685b2d7ec1f73d8577aec023b80d6665ad6a3f995c98a8d5f24ef18c3af0db38\": container with ID starting with 685b2d7ec1f73d8577aec023b80d6665ad6a3f995c98a8d5f24ef18c3af0db38 not found: ID does not exist"
Dec 05 15:46:41 crc kubenswrapper[4840]: I1205 15:46:41.703738 4840 scope.go:117] "RemoveContainer" containerID="ee15c21f8faabfa652c302e409f46e00158ea9847056bc77ebaaa86eef0e4eae"
Dec 05 15:46:41 crc kubenswrapper[4840]: E1205 15:46:41.704156 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee15c21f8faabfa652c302e409f46e00158ea9847056bc77ebaaa86eef0e4eae\": container with ID starting with ee15c21f8faabfa652c302e409f46e00158ea9847056bc77ebaaa86eef0e4eae not found: ID does not exist" containerID="ee15c21f8faabfa652c302e409f46e00158ea9847056bc77ebaaa86eef0e4eae"
Dec 05 15:46:41 crc kubenswrapper[4840]: I1205 15:46:41.704202 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee15c21f8faabfa652c302e409f46e00158ea9847056bc77ebaaa86eef0e4eae"} err="failed to get container status \"ee15c21f8faabfa652c302e409f46e00158ea9847056bc77ebaaa86eef0e4eae\": rpc error: code = NotFound desc = could not find container \"ee15c21f8faabfa652c302e409f46e00158ea9847056bc77ebaaa86eef0e4eae\": container with ID starting with ee15c21f8faabfa652c302e409f46e00158ea9847056bc77ebaaa86eef0e4eae not found: ID does not exist"
Dec 05 15:46:41 crc kubenswrapper[4840]: I1205 15:46:41.704218 4840 scope.go:117] "RemoveContainer" containerID="93556c8acb0109fd41b257c8d3199a7e1707ec37c4bc8ad7c5cf706920225130"
Dec 05 15:46:41 crc kubenswrapper[4840]: E1205 15:46:41.704466 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"93556c8acb0109fd41b257c8d3199a7e1707ec37c4bc8ad7c5cf706920225130\": container with ID starting with 93556c8acb0109fd41b257c8d3199a7e1707ec37c4bc8ad7c5cf706920225130 not found: ID does not exist" containerID="93556c8acb0109fd41b257c8d3199a7e1707ec37c4bc8ad7c5cf706920225130"
Dec 05 15:46:41 crc kubenswrapper[4840]: I1205 15:46:41.704487 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93556c8acb0109fd41b257c8d3199a7e1707ec37c4bc8ad7c5cf706920225130"} err="failed to get container status \"93556c8acb0109fd41b257c8d3199a7e1707ec37c4bc8ad7c5cf706920225130\": rpc error: code = NotFound desc = could not find container \"93556c8acb0109fd41b257c8d3199a7e1707ec37c4bc8ad7c5cf706920225130\": container with ID starting with 93556c8acb0109fd41b257c8d3199a7e1707ec37c4bc8ad7c5cf706920225130 not found: ID does not exist"
Dec 05 15:46:42 crc kubenswrapper[4840]: I1205 15:46:42.080283 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bafc2f1e-3440-4f41-a248-dbc7d322249a" path="/var/lib/kubelet/pods/bafc2f1e-3440-4f41-a248-dbc7d322249a/volumes"
Dec 05 15:46:49 crc kubenswrapper[4840]: I1205 15:46:49.472268 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 15:46:49 crc kubenswrapper[4840]: I1205 15:46:49.472801 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 15:47:19 crc kubenswrapper[4840]: I1205 15:47:19.472229 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 15:47:19 crc kubenswrapper[4840]: I1205 15:47:19.472705 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 15:47:49 crc kubenswrapper[4840]: I1205 15:47:49.472098 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 15:47:49 crc kubenswrapper[4840]: I1205 15:47:49.472786 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 15:47:49 crc kubenswrapper[4840]: I1205 15:47:49.472840 4840 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs"
Dec 05 15:47:49 crc kubenswrapper[4840]: I1205 15:47:49.473668 4840 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d"} pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 15:47:49 crc kubenswrapper[4840]: I1205 15:47:49.473736 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" containerID="cri-o://2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d" gracePeriod=600
Dec 05 15:47:49 crc kubenswrapper[4840]: E1205 15:47:49.594171 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:47:50 crc kubenswrapper[4840]: I1205 15:47:50.101987 4840 generic.go:334] "Generic (PLEG): container finished" podID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d" exitCode=0
Dec 05 15:47:50 crc kubenswrapper[4840]: I1205 15:47:50.102034 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerDied","Data":"2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d"}
Dec 05 15:47:50 crc kubenswrapper[4840]: I1205 15:47:50.102071 4840 scope.go:117] "RemoveContainer" containerID="5e57f28200b123d8625a5f66153ca84da704d74d1056a91e0747e528997298e9"
Dec 05 15:47:50 crc kubenswrapper[4840]: I1205 15:47:50.102628 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d"
Dec 05 15:47:50 crc kubenswrapper[4840]: E1205 15:47:50.102952 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:48:01 crc kubenswrapper[4840]: I1205 15:48:01.067500 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d"
Dec 05 15:48:01 crc kubenswrapper[4840]: E1205 15:48:01.068711 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:48:13 crc kubenswrapper[4840]: I1205 15:48:13.066612 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d"
Dec 05 15:48:13 crc kubenswrapper[4840]: E1205 15:48:13.067403 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:48:24 crc kubenswrapper[4840]: I1205 15:48:24.072858 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d"
Dec 05 15:48:24 crc kubenswrapper[4840]: E1205 15:48:24.073890 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:48:39 crc kubenswrapper[4840]: I1205 15:48:39.067039 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d"
Dec 05 15:48:39 crc kubenswrapper[4840]: E1205 15:48:39.067792 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:48:54 crc kubenswrapper[4840]: I1205 15:48:54.067582 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d"
Dec 05 15:48:54 crc kubenswrapper[4840]: E1205 15:48:54.068637 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:49:08 crc kubenswrapper[4840]: I1205 15:49:08.066682 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d"
Dec 05 15:49:08 crc kubenswrapper[4840]: E1205 15:49:08.067816 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:49:14 crc kubenswrapper[4840]: I1205 15:49:14.299803 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vtcpb"]
Dec 05 15:49:14 crc kubenswrapper[4840]: E1205 15:49:14.300756 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bafc2f1e-3440-4f41-a248-dbc7d322249a" containerName="extract-content"
Dec 05 15:49:14 crc kubenswrapper[4840]: I1205 15:49:14.300773 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="bafc2f1e-3440-4f41-a248-dbc7d322249a" containerName="extract-content"
Dec 05 15:49:14 crc kubenswrapper[4840]: E1205 15:49:14.300789 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bafc2f1e-3440-4f41-a248-dbc7d322249a" containerName="extract-utilities"
Dec 05 15:49:14 crc kubenswrapper[4840]: I1205 15:49:14.300797 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="bafc2f1e-3440-4f41-a248-dbc7d322249a" containerName="extract-utilities"
Dec 05 15:49:14 crc kubenswrapper[4840]: E1205 15:49:14.300838 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ab13447-14c1-4ec7-9937-3bda5ac9099e" containerName="extract-utilities"
Dec 05 15:49:14 crc kubenswrapper[4840]: I1205 15:49:14.300846 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ab13447-14c1-4ec7-9937-3bda5ac9099e" containerName="extract-utilities"
Dec 05 15:49:14 crc kubenswrapper[4840]: E1205 15:49:14.300859 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bafc2f1e-3440-4f41-a248-dbc7d322249a" containerName="registry-server"
Dec 05 15:49:14 crc kubenswrapper[4840]: I1205 15:49:14.300882 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="bafc2f1e-3440-4f41-a248-dbc7d322249a" containerName="registry-server"
Dec 05 15:49:14 crc kubenswrapper[4840]: E1205 15:49:14.300906 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ab13447-14c1-4ec7-9937-3bda5ac9099e" containerName="registry-server"
Dec 05 15:49:14 crc kubenswrapper[4840]: I1205 15:49:14.300914 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ab13447-14c1-4ec7-9937-3bda5ac9099e" containerName="registry-server"
Dec 05 15:49:14 crc kubenswrapper[4840]: E1205 15:49:14.300928 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ab13447-14c1-4ec7-9937-3bda5ac9099e" containerName="extract-content"
Dec 05 15:49:14 crc kubenswrapper[4840]: I1205 15:49:14.300936 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ab13447-14c1-4ec7-9937-3bda5ac9099e" containerName="extract-content"
Dec 05 15:49:14 crc kubenswrapper[4840]: I1205 15:49:14.301148 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="bafc2f1e-3440-4f41-a248-dbc7d322249a" containerName="registry-server"
Dec 05 15:49:14 crc kubenswrapper[4840]: I1205 15:49:14.301178 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ab13447-14c1-4ec7-9937-3bda5ac9099e" containerName="registry-server"
Dec 05 15:49:14 crc kubenswrapper[4840]: I1205 15:49:14.302918 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vtcpb"
Dec 05 15:49:14 crc kubenswrapper[4840]: I1205 15:49:14.309628 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vtcpb"]
Dec 05 15:49:14 crc kubenswrapper[4840]: I1205 15:49:14.412466 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1200ba8-21f4-4ea9-86aa-67c5e3434419-catalog-content\") pod \"redhat-marketplace-vtcpb\" (UID: \"a1200ba8-21f4-4ea9-86aa-67c5e3434419\") " pod="openshift-marketplace/redhat-marketplace-vtcpb"
Dec 05 15:49:14 crc kubenswrapper[4840]: I1205 15:49:14.412598 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1200ba8-21f4-4ea9-86aa-67c5e3434419-utilities\") pod \"redhat-marketplace-vtcpb\" (UID: \"a1200ba8-21f4-4ea9-86aa-67c5e3434419\") " pod="openshift-marketplace/redhat-marketplace-vtcpb"
Dec 05 15:49:14 crc kubenswrapper[4840]: I1205 15:49:14.412664 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v488b\" (UniqueName: \"kubernetes.io/projected/a1200ba8-21f4-4ea9-86aa-67c5e3434419-kube-api-access-v488b\") pod \"redhat-marketplace-vtcpb\" (UID: \"a1200ba8-21f4-4ea9-86aa-67c5e3434419\") " pod="openshift-marketplace/redhat-marketplace-vtcpb"
Dec 05 15:49:14 crc kubenswrapper[4840]: I1205 15:49:14.514446 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1200ba8-21f4-4ea9-86aa-67c5e3434419-utilities\") pod \"redhat-marketplace-vtcpb\" (UID: \"a1200ba8-21f4-4ea9-86aa-67c5e3434419\") " pod="openshift-marketplace/redhat-marketplace-vtcpb"
Dec 05 15:49:14 crc kubenswrapper[4840]: I1205 15:49:14.514558 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v488b\" (UniqueName: \"kubernetes.io/projected/a1200ba8-21f4-4ea9-86aa-67c5e3434419-kube-api-access-v488b\") pod \"redhat-marketplace-vtcpb\" (UID: \"a1200ba8-21f4-4ea9-86aa-67c5e3434419\") " pod="openshift-marketplace/redhat-marketplace-vtcpb"
Dec 05 15:49:14 crc kubenswrapper[4840]: I1205 15:49:14.514680 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1200ba8-21f4-4ea9-86aa-67c5e3434419-catalog-content\") pod \"redhat-marketplace-vtcpb\" (UID: \"a1200ba8-21f4-4ea9-86aa-67c5e3434419\") " pod="openshift-marketplace/redhat-marketplace-vtcpb"
Dec 05 15:49:14 crc kubenswrapper[4840]: I1205 15:49:14.515146 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1200ba8-21f4-4ea9-86aa-67c5e3434419-utilities\") pod \"redhat-marketplace-vtcpb\" (UID: \"a1200ba8-21f4-4ea9-86aa-67c5e3434419\") " pod="openshift-marketplace/redhat-marketplace-vtcpb"
Dec 05 15:49:14 crc kubenswrapper[4840]: I1205 15:49:14.515292 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1200ba8-21f4-4ea9-86aa-67c5e3434419-catalog-content\") pod \"redhat-marketplace-vtcpb\" (UID: \"a1200ba8-21f4-4ea9-86aa-67c5e3434419\") " pod="openshift-marketplace/redhat-marketplace-vtcpb"
Dec 05 15:49:14 crc kubenswrapper[4840]: I1205 15:49:14.536228 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v488b\" (UniqueName: \"kubernetes.io/projected/a1200ba8-21f4-4ea9-86aa-67c5e3434419-kube-api-access-v488b\") pod \"redhat-marketplace-vtcpb\" (UID: \"a1200ba8-21f4-4ea9-86aa-67c5e3434419\") " pod="openshift-marketplace/redhat-marketplace-vtcpb"
Dec 05 15:49:14 crc kubenswrapper[4840]: I1205 15:49:14.639324 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vtcpb"
Dec 05 15:49:15 crc kubenswrapper[4840]: I1205 15:49:15.146754 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vtcpb"]
Dec 05 15:49:15 crc kubenswrapper[4840]: E1205 15:49:15.541637 4840 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1200ba8_21f4_4ea9_86aa_67c5e3434419.slice/crio-a9d7ae68bac3495b5faf7f4d7206703e9989b2db34bcab50aebd6aa7f39716c2.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1200ba8_21f4_4ea9_86aa_67c5e3434419.slice/crio-conmon-a9d7ae68bac3495b5faf7f4d7206703e9989b2db34bcab50aebd6aa7f39716c2.scope\": RecentStats: unable to find data in memory cache]"
Dec 05 15:49:15 crc kubenswrapper[4840]: I1205 15:49:15.935995 4840 generic.go:334] "Generic (PLEG): container finished" podID="a1200ba8-21f4-4ea9-86aa-67c5e3434419" containerID="a9d7ae68bac3495b5faf7f4d7206703e9989b2db34bcab50aebd6aa7f39716c2" exitCode=0
Dec 05 15:49:15 crc kubenswrapper[4840]: I1205 15:49:15.936066 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vtcpb" event={"ID":"a1200ba8-21f4-4ea9-86aa-67c5e3434419","Type":"ContainerDied","Data":"a9d7ae68bac3495b5faf7f4d7206703e9989b2db34bcab50aebd6aa7f39716c2"}
Dec 05 15:49:15 crc kubenswrapper[4840]: I1205 15:49:15.936113 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vtcpb" event={"ID":"a1200ba8-21f4-4ea9-86aa-67c5e3434419","Type":"ContainerStarted","Data":"358087a647cdfa86bfb7cf99a67208bf63ba7ea36a5e088d21a850cc7976247e"}
Dec 05 15:49:15 crc kubenswrapper[4840]: I1205 15:49:15.938793 4840 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 15:49:16 crc kubenswrapper[4840]: I1205 15:49:16.947147 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vtcpb" event={"ID":"a1200ba8-21f4-4ea9-86aa-67c5e3434419","Type":"ContainerStarted","Data":"83975d63910438aca0ebc30774ecb5095ef198caf86603aa477493509e691b1e"}
Dec 05 15:49:17 crc kubenswrapper[4840]: I1205 15:49:17.960438 4840 generic.go:334] "Generic (PLEG): container finished" podID="a1200ba8-21f4-4ea9-86aa-67c5e3434419" containerID="83975d63910438aca0ebc30774ecb5095ef198caf86603aa477493509e691b1e" exitCode=0
Dec 05 15:49:17 crc kubenswrapper[4840]: I1205 15:49:17.960507 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vtcpb" event={"ID":"a1200ba8-21f4-4ea9-86aa-67c5e3434419","Type":"ContainerDied","Data":"83975d63910438aca0ebc30774ecb5095ef198caf86603aa477493509e691b1e"}
Dec 05 15:49:18 crc kubenswrapper[4840]: I1205 15:49:18.976937 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vtcpb" event={"ID":"a1200ba8-21f4-4ea9-86aa-67c5e3434419","Type":"ContainerStarted","Data":"6880392b3645e131b333c9e0d0d9d59aad7adec705be54d3adfc5797e229a62a"}
Dec 05 15:49:19 crc kubenswrapper[4840]: I1205 15:49:19.000549 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vtcpb" podStartSLOduration=2.577290049 podStartE2EDuration="5.000527155s" podCreationTimestamp="2025-12-05 15:49:14 +0000 UTC" firstStartedPulling="2025-12-05 15:49:15.938455708 +0000 UTC m=+3034.279518342" lastFinishedPulling="2025-12-05 15:49:18.361692834 +0000 UTC m=+3036.702755448" observedRunningTime="2025-12-05 15:49:18.999484505 +0000 UTC m=+3037.340547139" watchObservedRunningTime="2025-12-05 15:49:19.000527155 +0000 UTC m=+3037.341589779"
Dec 05 15:49:19 crc kubenswrapper[4840]: I1205 15:49:19.067025 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d"
Dec 05 15:49:19 crc kubenswrapper[4840]: E1205 15:49:19.067340 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 15:49:24 crc kubenswrapper[4840]: I1205 15:49:24.640372 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vtcpb"
Dec 05 15:49:24 crc kubenswrapper[4840]: I1205 15:49:24.641201 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vtcpb"
Dec 05 15:49:24 crc kubenswrapper[4840]: I1205 15:49:24.698763 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vtcpb"
Dec 05 15:49:25 crc kubenswrapper[4840]: I1205 15:49:25.077283 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vtcpb"
Dec 05 15:49:25 crc kubenswrapper[4840]: I1205 15:49:25.121004 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vtcpb"]
Dec 05 15:49:27 crc kubenswrapper[4840]: I1205 15:49:27.040996 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vtcpb" podUID="a1200ba8-21f4-4ea9-86aa-67c5e3434419" containerName="registry-server" containerID="cri-o://6880392b3645e131b333c9e0d0d9d59aad7adec705be54d3adfc5797e229a62a" gracePeriod=2
Dec 05 15:49:27 crc kubenswrapper[4840]: I1205 15:49:27.558584 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vtcpb"
Dec 05 15:49:27 crc kubenswrapper[4840]: I1205 15:49:27.676719 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1200ba8-21f4-4ea9-86aa-67c5e3434419-utilities\") pod \"a1200ba8-21f4-4ea9-86aa-67c5e3434419\" (UID: \"a1200ba8-21f4-4ea9-86aa-67c5e3434419\") "
Dec 05 15:49:27 crc kubenswrapper[4840]: I1205 15:49:27.676900 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v488b\" (UniqueName: \"kubernetes.io/projected/a1200ba8-21f4-4ea9-86aa-67c5e3434419-kube-api-access-v488b\") pod \"a1200ba8-21f4-4ea9-86aa-67c5e3434419\" (UID: \"a1200ba8-21f4-4ea9-86aa-67c5e3434419\") "
Dec 05 15:49:27 crc kubenswrapper[4840]: I1205 15:49:27.676997 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1200ba8-21f4-4ea9-86aa-67c5e3434419-catalog-content\") pod \"a1200ba8-21f4-4ea9-86aa-67c5e3434419\" (UID: \"a1200ba8-21f4-4ea9-86aa-67c5e3434419\") "
Dec 05 15:49:27 crc kubenswrapper[4840]: I1205 15:49:27.677844 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1200ba8-21f4-4ea9-86aa-67c5e3434419-utilities" (OuterVolumeSpecName: "utilities") pod "a1200ba8-21f4-4ea9-86aa-67c5e3434419" (UID: "a1200ba8-21f4-4ea9-86aa-67c5e3434419"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 15:49:27 crc kubenswrapper[4840]: I1205 15:49:27.686017 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1200ba8-21f4-4ea9-86aa-67c5e3434419-kube-api-access-v488b" (OuterVolumeSpecName: "kube-api-access-v488b") pod "a1200ba8-21f4-4ea9-86aa-67c5e3434419" (UID: "a1200ba8-21f4-4ea9-86aa-67c5e3434419"). InnerVolumeSpecName "kube-api-access-v488b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 15:49:27 crc kubenswrapper[4840]: I1205 15:49:27.727331 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1200ba8-21f4-4ea9-86aa-67c5e3434419-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a1200ba8-21f4-4ea9-86aa-67c5e3434419" (UID: "a1200ba8-21f4-4ea9-86aa-67c5e3434419"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 15:49:27 crc kubenswrapper[4840]: I1205 15:49:27.778935 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v488b\" (UniqueName: \"kubernetes.io/projected/a1200ba8-21f4-4ea9-86aa-67c5e3434419-kube-api-access-v488b\") on node \"crc\" DevicePath \"\""
Dec 05 15:49:27 crc kubenswrapper[4840]: I1205 15:49:27.778975 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1200ba8-21f4-4ea9-86aa-67c5e3434419-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 15:49:27 crc kubenswrapper[4840]: I1205 15:49:27.778987 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1200ba8-21f4-4ea9-86aa-67c5e3434419-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 15:49:28 crc kubenswrapper[4840]: I1205 15:49:28.050648 4840 generic.go:334] "Generic (PLEG): container finished" podID="a1200ba8-21f4-4ea9-86aa-67c5e3434419" containerID="6880392b3645e131b333c9e0d0d9d59aad7adec705be54d3adfc5797e229a62a" exitCode=0
Dec 05 15:49:28 crc kubenswrapper[4840]: I1205 15:49:28.050696 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vtcpb" event={"ID":"a1200ba8-21f4-4ea9-86aa-67c5e3434419","Type":"ContainerDied","Data":"6880392b3645e131b333c9e0d0d9d59aad7adec705be54d3adfc5797e229a62a"}
Dec 05 15:49:28 crc kubenswrapper[4840]: I1205 15:49:28.050755 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vtcpb" event={"ID":"a1200ba8-21f4-4ea9-86aa-67c5e3434419","Type":"ContainerDied","Data":"358087a647cdfa86bfb7cf99a67208bf63ba7ea36a5e088d21a850cc7976247e"}
Dec 05 15:49:28 crc kubenswrapper[4840]: I1205 15:49:28.050777 4840 scope.go:117] "RemoveContainer" containerID="6880392b3645e131b333c9e0d0d9d59aad7adec705be54d3adfc5797e229a62a"
Dec 05 15:49:28 crc kubenswrapper[4840]: I1205 15:49:28.050719 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vtcpb"
Dec 05 15:49:28 crc kubenswrapper[4840]: I1205 15:49:28.091043 4840 scope.go:117] "RemoveContainer" containerID="83975d63910438aca0ebc30774ecb5095ef198caf86603aa477493509e691b1e"
Dec 05 15:49:28 crc kubenswrapper[4840]: I1205 15:49:28.101258 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vtcpb"]
Dec 05 15:49:28 crc kubenswrapper[4840]: I1205 15:49:28.114802 4840 scope.go:117] "RemoveContainer" containerID="a9d7ae68bac3495b5faf7f4d7206703e9989b2db34bcab50aebd6aa7f39716c2"
Dec 05 15:49:28 crc kubenswrapper[4840]: I1205 15:49:28.117232 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vtcpb"]
Dec 05 15:49:28 crc kubenswrapper[4840]: I1205 15:49:28.161852 4840 scope.go:117] "RemoveContainer" containerID="6880392b3645e131b333c9e0d0d9d59aad7adec705be54d3adfc5797e229a62a"
Dec 05 15:49:28 crc kubenswrapper[4840]: E1205 15:49:28.162233 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6880392b3645e131b333c9e0d0d9d59aad7adec705be54d3adfc5797e229a62a\": container with ID starting with 6880392b3645e131b333c9e0d0d9d59aad7adec705be54d3adfc5797e229a62a not found: ID does not exist" containerID="6880392b3645e131b333c9e0d0d9d59aad7adec705be54d3adfc5797e229a62a"
Dec 05 15:49:28 crc kubenswrapper[4840]: I1205 15:49:28.162270 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6880392b3645e131b333c9e0d0d9d59aad7adec705be54d3adfc5797e229a62a"} err="failed to get container status \"6880392b3645e131b333c9e0d0d9d59aad7adec705be54d3adfc5797e229a62a\": rpc error: code = NotFound desc = could not find container \"6880392b3645e131b333c9e0d0d9d59aad7adec705be54d3adfc5797e229a62a\": container with ID starting with 6880392b3645e131b333c9e0d0d9d59aad7adec705be54d3adfc5797e229a62a not found: ID does not exist"
Dec 05 15:49:28 crc kubenswrapper[4840]: I1205 15:49:28.162292 4840 scope.go:117] "RemoveContainer" containerID="83975d63910438aca0ebc30774ecb5095ef198caf86603aa477493509e691b1e"
Dec 05 15:49:28 crc kubenswrapper[4840]: E1205 15:49:28.162512 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83975d63910438aca0ebc30774ecb5095ef198caf86603aa477493509e691b1e\": container with ID starting with 83975d63910438aca0ebc30774ecb5095ef198caf86603aa477493509e691b1e not found: ID does not exist" containerID="83975d63910438aca0ebc30774ecb5095ef198caf86603aa477493509e691b1e"
Dec 05 15:49:28 crc kubenswrapper[4840]: I1205 15:49:28.162531 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83975d63910438aca0ebc30774ecb5095ef198caf86603aa477493509e691b1e"} err="failed to get container status \"83975d63910438aca0ebc30774ecb5095ef198caf86603aa477493509e691b1e\": rpc error: code = NotFound desc = could not find container \"83975d63910438aca0ebc30774ecb5095ef198caf86603aa477493509e691b1e\": container with ID starting with 83975d63910438aca0ebc30774ecb5095ef198caf86603aa477493509e691b1e not found: ID does not exist"
Dec 05 15:49:28 crc kubenswrapper[4840]: I1205 15:49:28.162545 4840 scope.go:117] "RemoveContainer" containerID="a9d7ae68bac3495b5faf7f4d7206703e9989b2db34bcab50aebd6aa7f39716c2"
Dec 05 15:49:28 crc kubenswrapper[4840]: E1205 15:49:28.162775 4840 log.go:32] "ContainerStatus from runtime service
failed" err="rpc error: code = NotFound desc = could not find container \"a9d7ae68bac3495b5faf7f4d7206703e9989b2db34bcab50aebd6aa7f39716c2\": container with ID starting with a9d7ae68bac3495b5faf7f4d7206703e9989b2db34bcab50aebd6aa7f39716c2 not found: ID does not exist" containerID="a9d7ae68bac3495b5faf7f4d7206703e9989b2db34bcab50aebd6aa7f39716c2" Dec 05 15:49:28 crc kubenswrapper[4840]: I1205 15:49:28.162793 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9d7ae68bac3495b5faf7f4d7206703e9989b2db34bcab50aebd6aa7f39716c2"} err="failed to get container status \"a9d7ae68bac3495b5faf7f4d7206703e9989b2db34bcab50aebd6aa7f39716c2\": rpc error: code = NotFound desc = could not find container \"a9d7ae68bac3495b5faf7f4d7206703e9989b2db34bcab50aebd6aa7f39716c2\": container with ID starting with a9d7ae68bac3495b5faf7f4d7206703e9989b2db34bcab50aebd6aa7f39716c2 not found: ID does not exist" Dec 05 15:49:30 crc kubenswrapper[4840]: I1205 15:49:30.080685 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1200ba8-21f4-4ea9-86aa-67c5e3434419" path="/var/lib/kubelet/pods/a1200ba8-21f4-4ea9-86aa-67c5e3434419/volumes" Dec 05 15:49:34 crc kubenswrapper[4840]: I1205 15:49:34.068684 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d" Dec 05 15:49:34 crc kubenswrapper[4840]: E1205 15:49:34.069534 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:49:49 crc kubenswrapper[4840]: I1205 15:49:49.066818 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d" Dec 05 15:49:49 crc kubenswrapper[4840]: E1205 15:49:49.067839 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:50:00 crc kubenswrapper[4840]: I1205 15:50:00.068018 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d" Dec 05 15:50:00 crc kubenswrapper[4840]: E1205 15:50:00.068794 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:50:14 crc kubenswrapper[4840]: I1205 15:50:14.066833 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d" Dec 05 15:50:14 crc kubenswrapper[4840]: E1205 15:50:14.067631 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:50:27 crc kubenswrapper[4840]: I1205 15:50:27.066719 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d" Dec 05 15:50:27 crc kubenswrapper[4840]: E1205 15:50:27.068792 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:50:42 crc kubenswrapper[4840]: I1205 15:50:42.067442 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d" Dec 05 15:50:42 crc kubenswrapper[4840]: E1205 15:50:42.068251 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:50:54 crc kubenswrapper[4840]: I1205 15:50:54.066572 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d" Dec 05 15:50:54 crc kubenswrapper[4840]: E1205 15:50:54.067549 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:51:05 crc kubenswrapper[4840]: I1205 15:51:05.068401 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d" Dec 05 15:51:05 crc kubenswrapper[4840]: E1205 15:51:05.069756 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:51:17 crc kubenswrapper[4840]: I1205 15:51:17.068083 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d" Dec 05 15:51:17 crc kubenswrapper[4840]: E1205 15:51:17.069081 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:51:28 crc kubenswrapper[4840]: I1205 15:51:28.067302 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d" Dec 05 15:51:28 crc kubenswrapper[4840]: E1205 15:51:28.068276 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:51:41 crc kubenswrapper[4840]: I1205 15:51:41.066823 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d" Dec 05 15:51:41 crc kubenswrapper[4840]: E1205 15:51:41.067568 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:51:55 crc kubenswrapper[4840]: I1205 15:51:55.067427 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d" Dec 05 15:51:55 crc kubenswrapper[4840]: E1205 15:51:55.068172 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:52:07 crc kubenswrapper[4840]: I1205 15:52:07.067477 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d" Dec 05 15:52:07 crc kubenswrapper[4840]: E1205 15:52:07.068391 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:52:18 crc kubenswrapper[4840]: I1205 15:52:18.067501 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d" Dec 05 15:52:18 crc kubenswrapper[4840]: E1205 15:52:18.068474 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" 
podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:52:32 crc kubenswrapper[4840]: I1205 15:52:32.073851 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d" Dec 05 15:52:32 crc kubenswrapper[4840]: E1205 15:52:32.074910 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:52:43 crc kubenswrapper[4840]: I1205 15:52:43.067475 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d" Dec 05 15:52:43 crc kubenswrapper[4840]: E1205 15:52:43.068731 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:52:58 crc kubenswrapper[4840]: I1205 15:52:58.066641 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d" Dec 05 15:52:58 crc kubenswrapper[4840]: I1205 15:52:58.357249 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerStarted","Data":"2b478102a94947c649d5d899c2c867efcfa2b258c8381dcbdcfbcf9f4ef1907a"} Dec 05 15:54:49 crc kubenswrapper[4840]: I1205 15:54:49.512449 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9jhhq"] Dec 05 15:54:49 crc kubenswrapper[4840]: E1205 15:54:49.513580 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1200ba8-21f4-4ea9-86aa-67c5e3434419" containerName="extract-utilities" Dec 05 15:54:49 crc kubenswrapper[4840]: I1205 15:54:49.513607 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1200ba8-21f4-4ea9-86aa-67c5e3434419" containerName="extract-utilities" Dec 05 15:54:49 crc kubenswrapper[4840]: E1205 15:54:49.513633 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1200ba8-21f4-4ea9-86aa-67c5e3434419" containerName="registry-server" Dec 05 15:54:49 crc kubenswrapper[4840]: I1205 15:54:49.513641 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1200ba8-21f4-4ea9-86aa-67c5e3434419" containerName="registry-server" Dec 05 15:54:49 crc kubenswrapper[4840]: E1205 15:54:49.513665 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1200ba8-21f4-4ea9-86aa-67c5e3434419" containerName="extract-content" Dec 05 15:54:49 crc kubenswrapper[4840]: I1205 15:54:49.513674 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1200ba8-21f4-4ea9-86aa-67c5e3434419" containerName="extract-content" Dec 05 15:54:49 crc kubenswrapper[4840]: I1205 15:54:49.514340 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1200ba8-21f4-4ea9-86aa-67c5e3434419" containerName="registry-server" Dec 05 15:54:49 crc kubenswrapper[4840]: I1205 15:54:49.516355 4840 util.go:30] 
"No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9jhhq" Dec 05 15:54:49 crc kubenswrapper[4840]: I1205 15:54:49.532519 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9jhhq"] Dec 05 15:54:49 crc kubenswrapper[4840]: I1205 15:54:49.748829 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e-catalog-content\") pod \"redhat-operators-9jhhq\" (UID: \"d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e\") " pod="openshift-marketplace/redhat-operators-9jhhq" Dec 05 15:54:49 crc kubenswrapper[4840]: I1205 15:54:49.750634 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e-utilities\") pod \"redhat-operators-9jhhq\" (UID: \"d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e\") " pod="openshift-marketplace/redhat-operators-9jhhq" Dec 05 15:54:49 crc kubenswrapper[4840]: I1205 15:54:49.750762 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x658j\" (UniqueName: \"kubernetes.io/projected/d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e-kube-api-access-x658j\") pod \"redhat-operators-9jhhq\" (UID: \"d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e\") " pod="openshift-marketplace/redhat-operators-9jhhq" Dec 05 15:54:49 crc kubenswrapper[4840]: I1205 15:54:49.852158 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x658j\" (UniqueName: \"kubernetes.io/projected/d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e-kube-api-access-x658j\") pod \"redhat-operators-9jhhq\" (UID: \"d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e\") " pod="openshift-marketplace/redhat-operators-9jhhq" Dec 05 15:54:49 crc kubenswrapper[4840]: I1205 15:54:49.852313 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e-catalog-content\") pod \"redhat-operators-9jhhq\" (UID: \"d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e\") " pod="openshift-marketplace/redhat-operators-9jhhq" Dec 05 15:54:49 crc kubenswrapper[4840]: I1205 15:54:49.852388 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e-utilities\") pod \"redhat-operators-9jhhq\" (UID: \"d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e\") " pod="openshift-marketplace/redhat-operators-9jhhq" Dec 05 15:54:49 crc kubenswrapper[4840]: I1205 15:54:49.852922 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e-utilities\") pod \"redhat-operators-9jhhq\" (UID: \"d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e\") " pod="openshift-marketplace/redhat-operators-9jhhq" Dec 05 15:54:49 crc kubenswrapper[4840]: I1205 15:54:49.853151 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e-catalog-content\") pod \"redhat-operators-9jhhq\" (UID: \"d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e\") " pod="openshift-marketplace/redhat-operators-9jhhq" Dec 05 15:54:49 crc kubenswrapper[4840]: I1205 15:54:49.873637 4840 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-x658j\" (UniqueName: \"kubernetes.io/projected/d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e-kube-api-access-x658j\") pod \"redhat-operators-9jhhq\" (UID: \"d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e\") " pod="openshift-marketplace/redhat-operators-9jhhq" Dec 05 15:54:49 crc kubenswrapper[4840]: I1205 15:54:49.951653 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9jhhq" Dec 05 15:54:50 crc kubenswrapper[4840]: I1205 15:54:50.439720 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9jhhq"] Dec 05 15:54:50 crc kubenswrapper[4840]: I1205 15:54:50.539767 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9jhhq" event={"ID":"d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e","Type":"ContainerStarted","Data":"a4cb7b7265c613e3a3161bd791e5276871f20dd20cb7fc0374b670b22269a4dc"} Dec 05 15:54:51 crc kubenswrapper[4840]: I1205 15:54:51.551362 4840 generic.go:334] "Generic (PLEG): container finished" podID="d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e" containerID="22a8416fcc2c6a824062ccd609be7987ddc037b8bfc2abc7ca96c44417de5bfe" exitCode=0 Dec 05 15:54:51 crc kubenswrapper[4840]: I1205 15:54:51.551417 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9jhhq" event={"ID":"d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e","Type":"ContainerDied","Data":"22a8416fcc2c6a824062ccd609be7987ddc037b8bfc2abc7ca96c44417de5bfe"} Dec 05 15:54:51 crc kubenswrapper[4840]: I1205 15:54:51.553468 4840 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 15:54:52 crc kubenswrapper[4840]: I1205 15:54:52.566583 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9jhhq" event={"ID":"d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e","Type":"ContainerStarted","Data":"ca43b9afd688b560aa68f833f689c6312478ce4d1d47522c3a8f5137df6e6632"} Dec 05 15:54:54 crc kubenswrapper[4840]: I1205 15:54:54.587189 4840 generic.go:334] "Generic (PLEG): container finished" podID="d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e" containerID="ca43b9afd688b560aa68f833f689c6312478ce4d1d47522c3a8f5137df6e6632" exitCode=0 Dec 05 15:54:54 crc kubenswrapper[4840]: I1205 15:54:54.587274 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9jhhq" event={"ID":"d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e","Type":"ContainerDied","Data":"ca43b9afd688b560aa68f833f689c6312478ce4d1d47522c3a8f5137df6e6632"} Dec 05 15:54:55 crc kubenswrapper[4840]: I1205 15:54:55.598973 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9jhhq" event={"ID":"d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e","Type":"ContainerStarted","Data":"29aefffc602fe102ae50c10263dda9977484145b137bddadef15dbade4339b96"} Dec 05 15:54:55 crc kubenswrapper[4840]: I1205 15:54:55.627166 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9jhhq" podStartSLOduration=3.073569773 podStartE2EDuration="6.627146969s" podCreationTimestamp="2025-12-05 15:54:49 +0000 UTC" firstStartedPulling="2025-12-05 15:54:51.553225658 +0000 UTC m=+3369.894288262" lastFinishedPulling="2025-12-05 15:54:55.106802844 +0000 UTC m=+3373.447865458" observedRunningTime="2025-12-05 15:54:55.6208497 +0000 UTC m=+3373.961912314" watchObservedRunningTime="2025-12-05 15:54:55.627146969 +0000 UTC m=+3373.968209583" Dec 05 
15:54:59 crc kubenswrapper[4840]: I1205 15:54:59.951971 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9jhhq" Dec 05 15:54:59 crc kubenswrapper[4840]: I1205 15:54:59.953023 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9jhhq" Dec 05 15:55:01 crc kubenswrapper[4840]: I1205 15:55:01.002037 4840 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-9jhhq" podUID="d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e" containerName="registry-server" probeResult="failure" output=< Dec 05 15:55:01 crc kubenswrapper[4840]: timeout: failed to connect service ":50051" within 1s Dec 05 15:55:01 crc kubenswrapper[4840]: > Dec 05 15:55:10 crc kubenswrapper[4840]: I1205 15:55:10.065565 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9jhhq" Dec 05 15:55:10 crc kubenswrapper[4840]: I1205 15:55:10.122467 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9jhhq" Dec 05 15:55:10 crc kubenswrapper[4840]: I1205 15:55:10.300715 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9jhhq"] Dec 05 15:55:11 crc kubenswrapper[4840]: I1205 15:55:11.743517 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9jhhq" podUID="d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e" containerName="registry-server" containerID="cri-o://29aefffc602fe102ae50c10263dda9977484145b137bddadef15dbade4339b96" gracePeriod=2 Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.289071 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9jhhq" Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.479124 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e-catalog-content\") pod \"d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e\" (UID: \"d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e\") " Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.479203 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x658j\" (UniqueName: \"kubernetes.io/projected/d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e-kube-api-access-x658j\") pod \"d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e\" (UID: \"d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e\") " Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.479359 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e-utilities\") pod \"d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e\" (UID: \"d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e\") " Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.480799 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e-utilities" (OuterVolumeSpecName: "utilities") pod "d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e" (UID: "d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.486712 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e-kube-api-access-x658j" (OuterVolumeSpecName: "kube-api-access-x658j") pod "d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e" (UID: "d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e"). InnerVolumeSpecName "kube-api-access-x658j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.581651 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x658j\" (UniqueName: \"kubernetes.io/projected/d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e-kube-api-access-x658j\") on node \"crc\" DevicePath \"\"" Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.581684 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.605060 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e" (UID: "d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.684137 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.760712 4840 generic.go:334] "Generic (PLEG): container finished" podID="d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e" containerID="29aefffc602fe102ae50c10263dda9977484145b137bddadef15dbade4339b96" exitCode=0 Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.760811 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9jhhq" event={"ID":"d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e","Type":"ContainerDied","Data":"29aefffc602fe102ae50c10263dda9977484145b137bddadef15dbade4339b96"} Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.760851 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9jhhq" Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.761257 4840 scope.go:117] "RemoveContainer" containerID="29aefffc602fe102ae50c10263dda9977484145b137bddadef15dbade4339b96" Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.761236 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9jhhq" event={"ID":"d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e","Type":"ContainerDied","Data":"a4cb7b7265c613e3a3161bd791e5276871f20dd20cb7fc0374b670b22269a4dc"} Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.787968 4840 scope.go:117] "RemoveContainer" containerID="ca43b9afd688b560aa68f833f689c6312478ce4d1d47522c3a8f5137df6e6632" Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.809181 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9jhhq"] Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.816979 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9jhhq"] Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.823162 4840 scope.go:117] "RemoveContainer" containerID="22a8416fcc2c6a824062ccd609be7987ddc037b8bfc2abc7ca96c44417de5bfe" Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.869231 4840 scope.go:117] "RemoveContainer" containerID="29aefffc602fe102ae50c10263dda9977484145b137bddadef15dbade4339b96" Dec 05 15:55:12 crc kubenswrapper[4840]: E1205 15:55:12.869683 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29aefffc602fe102ae50c10263dda9977484145b137bddadef15dbade4339b96\": container with ID starting with 29aefffc602fe102ae50c10263dda9977484145b137bddadef15dbade4339b96 not found: ID does not exist" containerID="29aefffc602fe102ae50c10263dda9977484145b137bddadef15dbade4339b96" Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.869730 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29aefffc602fe102ae50c10263dda9977484145b137bddadef15dbade4339b96"} err="failed to get container status \"29aefffc602fe102ae50c10263dda9977484145b137bddadef15dbade4339b96\": rpc error: code = NotFound desc = could not find container \"29aefffc602fe102ae50c10263dda9977484145b137bddadef15dbade4339b96\": container with ID starting with 29aefffc602fe102ae50c10263dda9977484145b137bddadef15dbade4339b96 not found: ID does not exist" Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.869756 4840 scope.go:117] "RemoveContainer" containerID="ca43b9afd688b560aa68f833f689c6312478ce4d1d47522c3a8f5137df6e6632" Dec 05 15:55:12 crc kubenswrapper[4840]: E1205 15:55:12.870188 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca43b9afd688b560aa68f833f689c6312478ce4d1d47522c3a8f5137df6e6632\": container with ID starting with ca43b9afd688b560aa68f833f689c6312478ce4d1d47522c3a8f5137df6e6632 not found: ID does not exist" containerID="ca43b9afd688b560aa68f833f689c6312478ce4d1d47522c3a8f5137df6e6632" Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.870215 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca43b9afd688b560aa68f833f689c6312478ce4d1d47522c3a8f5137df6e6632"} err="failed to get container status \"ca43b9afd688b560aa68f833f689c6312478ce4d1d47522c3a8f5137df6e6632\": rpc error: code = NotFound desc = could not find container 
\"ca43b9afd688b560aa68f833f689c6312478ce4d1d47522c3a8f5137df6e6632\": container with ID starting with ca43b9afd688b560aa68f833f689c6312478ce4d1d47522c3a8f5137df6e6632 not found: ID does not exist" Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.870235 4840 scope.go:117] "RemoveContainer" containerID="22a8416fcc2c6a824062ccd609be7987ddc037b8bfc2abc7ca96c44417de5bfe" Dec 05 15:55:12 crc kubenswrapper[4840]: E1205 15:55:12.870622 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22a8416fcc2c6a824062ccd609be7987ddc037b8bfc2abc7ca96c44417de5bfe\": container with ID starting with 22a8416fcc2c6a824062ccd609be7987ddc037b8bfc2abc7ca96c44417de5bfe not found: ID does not exist" containerID="22a8416fcc2c6a824062ccd609be7987ddc037b8bfc2abc7ca96c44417de5bfe" Dec 05 15:55:12 crc kubenswrapper[4840]: I1205 15:55:12.870645 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22a8416fcc2c6a824062ccd609be7987ddc037b8bfc2abc7ca96c44417de5bfe"} err="failed to get container status \"22a8416fcc2c6a824062ccd609be7987ddc037b8bfc2abc7ca96c44417de5bfe\": rpc error: code = NotFound desc = could not find container \"22a8416fcc2c6a824062ccd609be7987ddc037b8bfc2abc7ca96c44417de5bfe\": container with ID starting with 22a8416fcc2c6a824062ccd609be7987ddc037b8bfc2abc7ca96c44417de5bfe not found: ID does not exist" Dec 05 15:55:14 crc kubenswrapper[4840]: I1205 15:55:14.079476 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e" path="/var/lib/kubelet/pods/d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e/volumes" Dec 05 15:55:19 crc kubenswrapper[4840]: I1205 15:55:19.471996 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:55:19 crc kubenswrapper[4840]: I1205 15:55:19.472608 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:55:49 crc kubenswrapper[4840]: I1205 15:55:49.472600 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:55:49 crc kubenswrapper[4840]: I1205 15:55:49.473388 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:56:19 crc kubenswrapper[4840]: I1205 15:56:19.471945 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:56:19 crc 
kubenswrapper[4840]: I1205 15:56:19.472499 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:56:19 crc kubenswrapper[4840]: I1205 15:56:19.472547 4840 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" Dec 05 15:56:19 crc kubenswrapper[4840]: I1205 15:56:19.473298 4840 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2b478102a94947c649d5d899c2c867efcfa2b258c8381dcbdcfbcf9f4ef1907a"} pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 15:56:19 crc kubenswrapper[4840]: I1205 15:56:19.473367 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" containerID="cri-o://2b478102a94947c649d5d899c2c867efcfa2b258c8381dcbdcfbcf9f4ef1907a" gracePeriod=600 Dec 05 15:56:20 crc kubenswrapper[4840]: I1205 15:56:20.372386 4840 generic.go:334] "Generic (PLEG): container finished" podID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerID="2b478102a94947c649d5d899c2c867efcfa2b258c8381dcbdcfbcf9f4ef1907a" exitCode=0 Dec 05 15:56:20 crc kubenswrapper[4840]: I1205 15:56:20.372451 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerDied","Data":"2b478102a94947c649d5d899c2c867efcfa2b258c8381dcbdcfbcf9f4ef1907a"} Dec 05 15:56:20 crc kubenswrapper[4840]: I1205 15:56:20.373028 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerStarted","Data":"4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96"} Dec 05 15:56:20 crc kubenswrapper[4840]: I1205 15:56:20.373052 4840 scope.go:117] "RemoveContainer" containerID="2a9c093821c523551e7e91b9044d14359932cc05f05ec367b12471a7412df40d" Dec 05 15:57:20 crc kubenswrapper[4840]: I1205 15:57:20.914734 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ttzxt"] Dec 05 15:57:20 crc kubenswrapper[4840]: E1205 15:57:20.929194 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e" containerName="extract-utilities" Dec 05 15:57:20 crc kubenswrapper[4840]: I1205 15:57:20.929514 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e" containerName="extract-utilities" Dec 05 15:57:20 crc kubenswrapper[4840]: E1205 15:57:20.929556 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e" containerName="registry-server" Dec 05 15:57:20 crc kubenswrapper[4840]: I1205 15:57:20.929569 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e" containerName="registry-server" Dec 05 15:57:20 crc kubenswrapper[4840]: E1205 15:57:20.929604 4840 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e" containerName="extract-content" Dec 05 15:57:20 crc kubenswrapper[4840]: I1205 15:57:20.929611 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e" containerName="extract-content" Dec 05 15:57:20 crc kubenswrapper[4840]: I1205 15:57:20.931655 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2a70241-6c16-4c4c-a6cb-3a5ae6591a3e" containerName="registry-server" Dec 05 15:57:20 crc kubenswrapper[4840]: I1205 15:57:20.939984 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ttzxt" Dec 05 15:57:20 crc kubenswrapper[4840]: I1205 15:57:20.943704 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ttzxt"] Dec 05 15:57:20 crc kubenswrapper[4840]: I1205 15:57:20.970054 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1931e75-0ea1-4e62-9fc4-599a94808f0f-utilities\") pod \"community-operators-ttzxt\" (UID: \"b1931e75-0ea1-4e62-9fc4-599a94808f0f\") " pod="openshift-marketplace/community-operators-ttzxt" Dec 05 15:57:20 crc kubenswrapper[4840]: I1205 15:57:20.970112 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fd8wf\" (UniqueName: \"kubernetes.io/projected/b1931e75-0ea1-4e62-9fc4-599a94808f0f-kube-api-access-fd8wf\") pod \"community-operators-ttzxt\" (UID: \"b1931e75-0ea1-4e62-9fc4-599a94808f0f\") " pod="openshift-marketplace/community-operators-ttzxt" Dec 05 15:57:20 crc kubenswrapper[4840]: I1205 15:57:20.970190 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1931e75-0ea1-4e62-9fc4-599a94808f0f-catalog-content\") pod \"community-operators-ttzxt\" (UID: \"b1931e75-0ea1-4e62-9fc4-599a94808f0f\") " pod="openshift-marketplace/community-operators-ttzxt" Dec 05 15:57:21 crc kubenswrapper[4840]: I1205 15:57:21.071397 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fd8wf\" (UniqueName: \"kubernetes.io/projected/b1931e75-0ea1-4e62-9fc4-599a94808f0f-kube-api-access-fd8wf\") pod \"community-operators-ttzxt\" (UID: \"b1931e75-0ea1-4e62-9fc4-599a94808f0f\") " pod="openshift-marketplace/community-operators-ttzxt" Dec 05 15:57:21 crc kubenswrapper[4840]: I1205 15:57:21.071505 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1931e75-0ea1-4e62-9fc4-599a94808f0f-catalog-content\") pod \"community-operators-ttzxt\" (UID: \"b1931e75-0ea1-4e62-9fc4-599a94808f0f\") " pod="openshift-marketplace/community-operators-ttzxt" Dec 05 15:57:21 crc kubenswrapper[4840]: I1205 15:57:21.071659 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1931e75-0ea1-4e62-9fc4-599a94808f0f-utilities\") pod \"community-operators-ttzxt\" (UID: \"b1931e75-0ea1-4e62-9fc4-599a94808f0f\") " pod="openshift-marketplace/community-operators-ttzxt" Dec 05 15:57:21 crc kubenswrapper[4840]: I1205 15:57:21.072082 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/b1931e75-0ea1-4e62-9fc4-599a94808f0f-catalog-content\") pod \"community-operators-ttzxt\" (UID: \"b1931e75-0ea1-4e62-9fc4-599a94808f0f\") " pod="openshift-marketplace/community-operators-ttzxt" Dec 05 15:57:21 crc kubenswrapper[4840]: I1205 15:57:21.072120 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1931e75-0ea1-4e62-9fc4-599a94808f0f-utilities\") pod \"community-operators-ttzxt\" (UID: \"b1931e75-0ea1-4e62-9fc4-599a94808f0f\") " pod="openshift-marketplace/community-operators-ttzxt" Dec 05 15:57:21 crc kubenswrapper[4840]: I1205 15:57:21.091430 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fd8wf\" (UniqueName: \"kubernetes.io/projected/b1931e75-0ea1-4e62-9fc4-599a94808f0f-kube-api-access-fd8wf\") pod \"community-operators-ttzxt\" (UID: \"b1931e75-0ea1-4e62-9fc4-599a94808f0f\") " pod="openshift-marketplace/community-operators-ttzxt" Dec 05 15:57:21 crc kubenswrapper[4840]: I1205 15:57:21.291306 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ttzxt" Dec 05 15:57:21 crc kubenswrapper[4840]: I1205 15:57:21.808207 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ttzxt"] Dec 05 15:57:21 crc kubenswrapper[4840]: W1205 15:57:21.819662 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb1931e75_0ea1_4e62_9fc4_599a94808f0f.slice/crio-fe84779a7788e2dd47683c436bcdb563d406a4739e47b38521a007457889fb69 WatchSource:0}: Error finding container fe84779a7788e2dd47683c436bcdb563d406a4739e47b38521a007457889fb69: Status 404 returned error can't find the container with id fe84779a7788e2dd47683c436bcdb563d406a4739e47b38521a007457889fb69 Dec 05 15:57:21 crc kubenswrapper[4840]: I1205 15:57:21.957523 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ttzxt" event={"ID":"b1931e75-0ea1-4e62-9fc4-599a94808f0f","Type":"ContainerStarted","Data":"fe84779a7788e2dd47683c436bcdb563d406a4739e47b38521a007457889fb69"} Dec 05 15:57:22 crc kubenswrapper[4840]: I1205 15:57:22.968910 4840 generic.go:334] "Generic (PLEG): container finished" podID="b1931e75-0ea1-4e62-9fc4-599a94808f0f" containerID="402ab673e06857ff33924be3953c5c072b0c3668524632e5a5940e41012c5ee9" exitCode=0 Dec 05 15:57:22 crc kubenswrapper[4840]: I1205 15:57:22.968981 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ttzxt" event={"ID":"b1931e75-0ea1-4e62-9fc4-599a94808f0f","Type":"ContainerDied","Data":"402ab673e06857ff33924be3953c5c072b0c3668524632e5a5940e41012c5ee9"} Dec 05 15:57:23 crc kubenswrapper[4840]: I1205 15:57:23.979606 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ttzxt" event={"ID":"b1931e75-0ea1-4e62-9fc4-599a94808f0f","Type":"ContainerStarted","Data":"44095afe53e0e5ae8ae328c310d972c50babf46a45ea0a3ce306db2c0c5fa0b7"} Dec 05 15:57:24 crc kubenswrapper[4840]: I1205 15:57:24.989482 4840 generic.go:334] "Generic (PLEG): container finished" podID="b1931e75-0ea1-4e62-9fc4-599a94808f0f" containerID="44095afe53e0e5ae8ae328c310d972c50babf46a45ea0a3ce306db2c0c5fa0b7" exitCode=0 Dec 05 15:57:24 crc kubenswrapper[4840]: I1205 15:57:24.989524 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-ttzxt" event={"ID":"b1931e75-0ea1-4e62-9fc4-599a94808f0f","Type":"ContainerDied","Data":"44095afe53e0e5ae8ae328c310d972c50babf46a45ea0a3ce306db2c0c5fa0b7"} Dec 05 15:57:26 crc kubenswrapper[4840]: I1205 15:57:26.011085 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ttzxt" event={"ID":"b1931e75-0ea1-4e62-9fc4-599a94808f0f","Type":"ContainerStarted","Data":"dba1ed73ed4d649a82b22d62e26e626c42b49ca675285df4241bc0d08f6a3893"} Dec 05 15:57:26 crc kubenswrapper[4840]: I1205 15:57:26.037064 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ttzxt" podStartSLOduration=3.599646207 podStartE2EDuration="6.037045791s" podCreationTimestamp="2025-12-05 15:57:20 +0000 UTC" firstStartedPulling="2025-12-05 15:57:22.970954857 +0000 UTC m=+3521.312017471" lastFinishedPulling="2025-12-05 15:57:25.408354441 +0000 UTC m=+3523.749417055" observedRunningTime="2025-12-05 15:57:26.031594516 +0000 UTC m=+3524.372657150" watchObservedRunningTime="2025-12-05 15:57:26.037045791 +0000 UTC m=+3524.378108405" Dec 05 15:57:28 crc kubenswrapper[4840]: I1205 15:57:28.029008 4840 generic.go:334] "Generic (PLEG): container finished" podID="d2e8a783-170e-44cb-a505-1ee2a96572af" containerID="44f025ec8a947db012c1d772b880847564feb8da8ce741d29bd32729e61a5fd4" exitCode=0 Dec 05 15:57:28 crc kubenswrapper[4840]: I1205 15:57:28.029131 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"d2e8a783-170e-44cb-a505-1ee2a96572af","Type":"ContainerDied","Data":"44f025ec8a947db012c1d772b880847564feb8da8ce741d29bd32729e61a5fd4"} Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.379700 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.531765 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/d2e8a783-170e-44cb-a505-1ee2a96572af-test-operator-ephemeral-temporary\") pod \"d2e8a783-170e-44cb-a505-1ee2a96572af\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.532089 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/d2e8a783-170e-44cb-a505-1ee2a96572af-test-operator-ephemeral-workdir\") pod \"d2e8a783-170e-44cb-a505-1ee2a96572af\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.532151 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d2e8a783-170e-44cb-a505-1ee2a96572af-openstack-config-secret\") pod \"d2e8a783-170e-44cb-a505-1ee2a96572af\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.532204 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d9r6\" (UniqueName: \"kubernetes.io/projected/d2e8a783-170e-44cb-a505-1ee2a96572af-kube-api-access-4d9r6\") pod \"d2e8a783-170e-44cb-a505-1ee2a96572af\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.532264 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d2e8a783-170e-44cb-a505-1ee2a96572af-openstack-config\") pod \"d2e8a783-170e-44cb-a505-1ee2a96572af\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.532429 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"d2e8a783-170e-44cb-a505-1ee2a96572af\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.532494 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d2e8a783-170e-44cb-a505-1ee2a96572af-config-data\") pod \"d2e8a783-170e-44cb-a505-1ee2a96572af\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.532611 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2e8a783-170e-44cb-a505-1ee2a96572af-ssh-key\") pod \"d2e8a783-170e-44cb-a505-1ee2a96572af\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.532620 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2e8a783-170e-44cb-a505-1ee2a96572af-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "d2e8a783-170e-44cb-a505-1ee2a96572af" (UID: "d2e8a783-170e-44cb-a505-1ee2a96572af"). InnerVolumeSpecName "test-operator-ephemeral-temporary". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.532697 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/d2e8a783-170e-44cb-a505-1ee2a96572af-ca-certs\") pod \"d2e8a783-170e-44cb-a505-1ee2a96572af\" (UID: \"d2e8a783-170e-44cb-a505-1ee2a96572af\") " Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.533461 4840 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/d2e8a783-170e-44cb-a505-1ee2a96572af-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.534181 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d2e8a783-170e-44cb-a505-1ee2a96572af-config-data" (OuterVolumeSpecName: "config-data") pod "d2e8a783-170e-44cb-a505-1ee2a96572af" (UID: "d2e8a783-170e-44cb-a505-1ee2a96572af"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.539686 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2e8a783-170e-44cb-a505-1ee2a96572af-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "d2e8a783-170e-44cb-a505-1ee2a96572af" (UID: "d2e8a783-170e-44cb-a505-1ee2a96572af"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.540016 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2e8a783-170e-44cb-a505-1ee2a96572af-kube-api-access-4d9r6" (OuterVolumeSpecName: "kube-api-access-4d9r6") pod "d2e8a783-170e-44cb-a505-1ee2a96572af" (UID: "d2e8a783-170e-44cb-a505-1ee2a96572af"). InnerVolumeSpecName "kube-api-access-4d9r6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.542583 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "test-operator-logs") pod "d2e8a783-170e-44cb-a505-1ee2a96572af" (UID: "d2e8a783-170e-44cb-a505-1ee2a96572af"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.569109 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2e8a783-170e-44cb-a505-1ee2a96572af-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "d2e8a783-170e-44cb-a505-1ee2a96572af" (UID: "d2e8a783-170e-44cb-a505-1ee2a96572af"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.578220 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2e8a783-170e-44cb-a505-1ee2a96572af-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d2e8a783-170e-44cb-a505-1ee2a96572af" (UID: "d2e8a783-170e-44cb-a505-1ee2a96572af"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.588156 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2e8a783-170e-44cb-a505-1ee2a96572af-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "d2e8a783-170e-44cb-a505-1ee2a96572af" (UID: "d2e8a783-170e-44cb-a505-1ee2a96572af"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.598742 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d2e8a783-170e-44cb-a505-1ee2a96572af-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "d2e8a783-170e-44cb-a505-1ee2a96572af" (UID: "d2e8a783-170e-44cb-a505-1ee2a96572af"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.635029 4840 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/d2e8a783-170e-44cb-a505-1ee2a96572af-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.635060 4840 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/d2e8a783-170e-44cb-a505-1ee2a96572af-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.635072 4840 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d2e8a783-170e-44cb-a505-1ee2a96572af-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.635085 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d9r6\" (UniqueName: \"kubernetes.io/projected/d2e8a783-170e-44cb-a505-1ee2a96572af-kube-api-access-4d9r6\") on node \"crc\" DevicePath \"\"" Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.635095 4840 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d2e8a783-170e-44cb-a505-1ee2a96572af-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.635153 4840 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.635165 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d2e8a783-170e-44cb-a505-1ee2a96572af-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.635173 4840 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2e8a783-170e-44cb-a505-1ee2a96572af-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.665187 4840 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Dec 05 15:57:29 crc kubenswrapper[4840]: I1205 15:57:29.737536 4840 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Dec 
05 15:57:30 crc kubenswrapper[4840]: I1205 15:57:30.052307 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"d2e8a783-170e-44cb-a505-1ee2a96572af","Type":"ContainerDied","Data":"47bf4458996aaad1f36d2747ef6d3696ce322a8ec8c4bb9c2e925df5ac892bd2"} Dec 05 15:57:30 crc kubenswrapper[4840]: I1205 15:57:30.052353 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="47bf4458996aaad1f36d2747ef6d3696ce322a8ec8c4bb9c2e925df5ac892bd2" Dec 05 15:57:30 crc kubenswrapper[4840]: I1205 15:57:30.052538 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 15:57:31 crc kubenswrapper[4840]: I1205 15:57:31.292129 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ttzxt" Dec 05 15:57:31 crc kubenswrapper[4840]: I1205 15:57:31.293041 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ttzxt" Dec 05 15:57:31 crc kubenswrapper[4840]: I1205 15:57:31.346546 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ttzxt" Dec 05 15:57:32 crc kubenswrapper[4840]: I1205 15:57:32.125343 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ttzxt" Dec 05 15:57:32 crc kubenswrapper[4840]: I1205 15:57:32.183404 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ttzxt"] Dec 05 15:57:34 crc kubenswrapper[4840]: I1205 15:57:34.099944 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ttzxt" podUID="b1931e75-0ea1-4e62-9fc4-599a94808f0f" containerName="registry-server" containerID="cri-o://dba1ed73ed4d649a82b22d62e26e626c42b49ca675285df4241bc0d08f6a3893" gracePeriod=2 Dec 05 15:57:34 crc kubenswrapper[4840]: I1205 15:57:34.822890 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ttzxt" Dec 05 15:57:34 crc kubenswrapper[4840]: I1205 15:57:34.903112 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fd8wf\" (UniqueName: \"kubernetes.io/projected/b1931e75-0ea1-4e62-9fc4-599a94808f0f-kube-api-access-fd8wf\") pod \"b1931e75-0ea1-4e62-9fc4-599a94808f0f\" (UID: \"b1931e75-0ea1-4e62-9fc4-599a94808f0f\") " Dec 05 15:57:34 crc kubenswrapper[4840]: I1205 15:57:34.903182 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1931e75-0ea1-4e62-9fc4-599a94808f0f-catalog-content\") pod \"b1931e75-0ea1-4e62-9fc4-599a94808f0f\" (UID: \"b1931e75-0ea1-4e62-9fc4-599a94808f0f\") " Dec 05 15:57:34 crc kubenswrapper[4840]: I1205 15:57:34.903415 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1931e75-0ea1-4e62-9fc4-599a94808f0f-utilities\") pod \"b1931e75-0ea1-4e62-9fc4-599a94808f0f\" (UID: \"b1931e75-0ea1-4e62-9fc4-599a94808f0f\") " Dec 05 15:57:34 crc kubenswrapper[4840]: I1205 15:57:34.904644 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1931e75-0ea1-4e62-9fc4-599a94808f0f-utilities" (OuterVolumeSpecName: "utilities") pod "b1931e75-0ea1-4e62-9fc4-599a94808f0f" (UID: "b1931e75-0ea1-4e62-9fc4-599a94808f0f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:57:34 crc kubenswrapper[4840]: I1205 15:57:34.909608 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1931e75-0ea1-4e62-9fc4-599a94808f0f-kube-api-access-fd8wf" (OuterVolumeSpecName: "kube-api-access-fd8wf") pod "b1931e75-0ea1-4e62-9fc4-599a94808f0f" (UID: "b1931e75-0ea1-4e62-9fc4-599a94808f0f"). InnerVolumeSpecName "kube-api-access-fd8wf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:57:34 crc kubenswrapper[4840]: I1205 15:57:34.958837 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1931e75-0ea1-4e62-9fc4-599a94808f0f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b1931e75-0ea1-4e62-9fc4-599a94808f0f" (UID: "b1931e75-0ea1-4e62-9fc4-599a94808f0f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:57:35 crc kubenswrapper[4840]: I1205 15:57:35.006943 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1931e75-0ea1-4e62-9fc4-599a94808f0f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 15:57:35 crc kubenswrapper[4840]: I1205 15:57:35.007300 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fd8wf\" (UniqueName: \"kubernetes.io/projected/b1931e75-0ea1-4e62-9fc4-599a94808f0f-kube-api-access-fd8wf\") on node \"crc\" DevicePath \"\"" Dec 05 15:57:35 crc kubenswrapper[4840]: I1205 15:57:35.007452 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1931e75-0ea1-4e62-9fc4-599a94808f0f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 15:57:35 crc kubenswrapper[4840]: I1205 15:57:35.110482 4840 generic.go:334] "Generic (PLEG): container finished" podID="b1931e75-0ea1-4e62-9fc4-599a94808f0f" containerID="dba1ed73ed4d649a82b22d62e26e626c42b49ca675285df4241bc0d08f6a3893" exitCode=0 Dec 05 15:57:35 crc kubenswrapper[4840]: I1205 15:57:35.110538 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ttzxt" Dec 05 15:57:35 crc kubenswrapper[4840]: I1205 15:57:35.110549 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ttzxt" event={"ID":"b1931e75-0ea1-4e62-9fc4-599a94808f0f","Type":"ContainerDied","Data":"dba1ed73ed4d649a82b22d62e26e626c42b49ca675285df4241bc0d08f6a3893"} Dec 05 15:57:35 crc kubenswrapper[4840]: I1205 15:57:35.110893 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ttzxt" event={"ID":"b1931e75-0ea1-4e62-9fc4-599a94808f0f","Type":"ContainerDied","Data":"fe84779a7788e2dd47683c436bcdb563d406a4739e47b38521a007457889fb69"} Dec 05 15:57:35 crc kubenswrapper[4840]: I1205 15:57:35.110913 4840 scope.go:117] "RemoveContainer" containerID="dba1ed73ed4d649a82b22d62e26e626c42b49ca675285df4241bc0d08f6a3893" Dec 05 15:57:35 crc kubenswrapper[4840]: I1205 15:57:35.130326 4840 scope.go:117] "RemoveContainer" containerID="44095afe53e0e5ae8ae328c310d972c50babf46a45ea0a3ce306db2c0c5fa0b7" Dec 05 15:57:35 crc kubenswrapper[4840]: I1205 15:57:35.152787 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ttzxt"] Dec 05 15:57:35 crc kubenswrapper[4840]: I1205 15:57:35.164181 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ttzxt"] Dec 05 15:57:35 crc kubenswrapper[4840]: I1205 15:57:35.175195 4840 scope.go:117] "RemoveContainer" containerID="402ab673e06857ff33924be3953c5c072b0c3668524632e5a5940e41012c5ee9" Dec 05 15:57:35 crc kubenswrapper[4840]: I1205 15:57:35.213327 4840 scope.go:117] "RemoveContainer" containerID="dba1ed73ed4d649a82b22d62e26e626c42b49ca675285df4241bc0d08f6a3893" Dec 05 15:57:35 crc kubenswrapper[4840]: E1205 15:57:35.213818 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dba1ed73ed4d649a82b22d62e26e626c42b49ca675285df4241bc0d08f6a3893\": container with ID starting with dba1ed73ed4d649a82b22d62e26e626c42b49ca675285df4241bc0d08f6a3893 not found: ID does not exist" containerID="dba1ed73ed4d649a82b22d62e26e626c42b49ca675285df4241bc0d08f6a3893" Dec 05 15:57:35 crc kubenswrapper[4840]: I1205 15:57:35.213847 
4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dba1ed73ed4d649a82b22d62e26e626c42b49ca675285df4241bc0d08f6a3893"} err="failed to get container status \"dba1ed73ed4d649a82b22d62e26e626c42b49ca675285df4241bc0d08f6a3893\": rpc error: code = NotFound desc = could not find container \"dba1ed73ed4d649a82b22d62e26e626c42b49ca675285df4241bc0d08f6a3893\": container with ID starting with dba1ed73ed4d649a82b22d62e26e626c42b49ca675285df4241bc0d08f6a3893 not found: ID does not exist" Dec 05 15:57:35 crc kubenswrapper[4840]: I1205 15:57:35.213883 4840 scope.go:117] "RemoveContainer" containerID="44095afe53e0e5ae8ae328c310d972c50babf46a45ea0a3ce306db2c0c5fa0b7" Dec 05 15:57:35 crc kubenswrapper[4840]: E1205 15:57:35.214075 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44095afe53e0e5ae8ae328c310d972c50babf46a45ea0a3ce306db2c0c5fa0b7\": container with ID starting with 44095afe53e0e5ae8ae328c310d972c50babf46a45ea0a3ce306db2c0c5fa0b7 not found: ID does not exist" containerID="44095afe53e0e5ae8ae328c310d972c50babf46a45ea0a3ce306db2c0c5fa0b7" Dec 05 15:57:35 crc kubenswrapper[4840]: I1205 15:57:35.214095 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44095afe53e0e5ae8ae328c310d972c50babf46a45ea0a3ce306db2c0c5fa0b7"} err="failed to get container status \"44095afe53e0e5ae8ae328c310d972c50babf46a45ea0a3ce306db2c0c5fa0b7\": rpc error: code = NotFound desc = could not find container \"44095afe53e0e5ae8ae328c310d972c50babf46a45ea0a3ce306db2c0c5fa0b7\": container with ID starting with 44095afe53e0e5ae8ae328c310d972c50babf46a45ea0a3ce306db2c0c5fa0b7 not found: ID does not exist" Dec 05 15:57:35 crc kubenswrapper[4840]: I1205 15:57:35.214110 4840 scope.go:117] "RemoveContainer" containerID="402ab673e06857ff33924be3953c5c072b0c3668524632e5a5940e41012c5ee9" Dec 05 15:57:35 crc kubenswrapper[4840]: E1205 15:57:35.214490 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"402ab673e06857ff33924be3953c5c072b0c3668524632e5a5940e41012c5ee9\": container with ID starting with 402ab673e06857ff33924be3953c5c072b0c3668524632e5a5940e41012c5ee9 not found: ID does not exist" containerID="402ab673e06857ff33924be3953c5c072b0c3668524632e5a5940e41012c5ee9" Dec 05 15:57:35 crc kubenswrapper[4840]: I1205 15:57:35.214510 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"402ab673e06857ff33924be3953c5c072b0c3668524632e5a5940e41012c5ee9"} err="failed to get container status \"402ab673e06857ff33924be3953c5c072b0c3668524632e5a5940e41012c5ee9\": rpc error: code = NotFound desc = could not find container \"402ab673e06857ff33924be3953c5c072b0c3668524632e5a5940e41012c5ee9\": container with ID starting with 402ab673e06857ff33924be3953c5c072b0c3668524632e5a5940e41012c5ee9 not found: ID does not exist" Dec 05 15:57:36 crc kubenswrapper[4840]: I1205 15:57:36.079838 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1931e75-0ea1-4e62-9fc4-599a94808f0f" path="/var/lib/kubelet/pods/b1931e75-0ea1-4e62-9fc4-599a94808f0f/volumes" Dec 05 15:57:40 crc kubenswrapper[4840]: I1205 15:57:40.572755 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 15:57:40 crc kubenswrapper[4840]: E1205 15:57:40.573760 4840 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="b1931e75-0ea1-4e62-9fc4-599a94808f0f" containerName="registry-server" Dec 05 15:57:40 crc kubenswrapper[4840]: I1205 15:57:40.573775 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1931e75-0ea1-4e62-9fc4-599a94808f0f" containerName="registry-server" Dec 05 15:57:40 crc kubenswrapper[4840]: E1205 15:57:40.573801 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2e8a783-170e-44cb-a505-1ee2a96572af" containerName="tempest-tests-tempest-tests-runner" Dec 05 15:57:40 crc kubenswrapper[4840]: I1205 15:57:40.573808 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2e8a783-170e-44cb-a505-1ee2a96572af" containerName="tempest-tests-tempest-tests-runner" Dec 05 15:57:40 crc kubenswrapper[4840]: E1205 15:57:40.573819 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1931e75-0ea1-4e62-9fc4-599a94808f0f" containerName="extract-content" Dec 05 15:57:40 crc kubenswrapper[4840]: I1205 15:57:40.573825 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1931e75-0ea1-4e62-9fc4-599a94808f0f" containerName="extract-content" Dec 05 15:57:40 crc kubenswrapper[4840]: E1205 15:57:40.573840 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1931e75-0ea1-4e62-9fc4-599a94808f0f" containerName="extract-utilities" Dec 05 15:57:40 crc kubenswrapper[4840]: I1205 15:57:40.573847 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1931e75-0ea1-4e62-9fc4-599a94808f0f" containerName="extract-utilities" Dec 05 15:57:40 crc kubenswrapper[4840]: I1205 15:57:40.574083 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1931e75-0ea1-4e62-9fc4-599a94808f0f" containerName="registry-server" Dec 05 15:57:40 crc kubenswrapper[4840]: I1205 15:57:40.574106 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2e8a783-170e-44cb-a505-1ee2a96572af" containerName="tempest-tests-tempest-tests-runner" Dec 05 15:57:40 crc kubenswrapper[4840]: I1205 15:57:40.574889 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 15:57:40 crc kubenswrapper[4840]: I1205 15:57:40.577491 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-mgrdk" Dec 05 15:57:40 crc kubenswrapper[4840]: I1205 15:57:40.586935 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 15:57:40 crc kubenswrapper[4840]: I1205 15:57:40.642303 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f37d61a2-7a86-40a3-9394-0ca13367e28f\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 15:57:40 crc kubenswrapper[4840]: I1205 15:57:40.642368 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbc47\" (UniqueName: \"kubernetes.io/projected/f37d61a2-7a86-40a3-9394-0ca13367e28f-kube-api-access-fbc47\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f37d61a2-7a86-40a3-9394-0ca13367e28f\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 15:57:40 crc kubenswrapper[4840]: I1205 15:57:40.743478 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f37d61a2-7a86-40a3-9394-0ca13367e28f\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 15:57:40 crc kubenswrapper[4840]: I1205 15:57:40.743551 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbc47\" (UniqueName: \"kubernetes.io/projected/f37d61a2-7a86-40a3-9394-0ca13367e28f-kube-api-access-fbc47\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f37d61a2-7a86-40a3-9394-0ca13367e28f\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 15:57:40 crc kubenswrapper[4840]: I1205 15:57:40.744164 4840 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f37d61a2-7a86-40a3-9394-0ca13367e28f\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 15:57:40 crc kubenswrapper[4840]: I1205 15:57:40.767110 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbc47\" (UniqueName: \"kubernetes.io/projected/f37d61a2-7a86-40a3-9394-0ca13367e28f-kube-api-access-fbc47\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f37d61a2-7a86-40a3-9394-0ca13367e28f\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 15:57:40 crc kubenswrapper[4840]: I1205 15:57:40.788204 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f37d61a2-7a86-40a3-9394-0ca13367e28f\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 15:57:40 crc 
kubenswrapper[4840]: I1205 15:57:40.942789 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 15:57:41 crc kubenswrapper[4840]: I1205 15:57:41.364925 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 15:57:42 crc kubenswrapper[4840]: I1205 15:57:42.174174 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"f37d61a2-7a86-40a3-9394-0ca13367e28f","Type":"ContainerStarted","Data":"3395b35766978d8a6a5255f6e2ca4e08693ec2b813a74dfd8fecf3a4377fcc5d"} Dec 05 15:57:43 crc kubenswrapper[4840]: I1205 15:57:43.182732 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"f37d61a2-7a86-40a3-9394-0ca13367e28f","Type":"ContainerStarted","Data":"2905ad3ce00cf6479b3f92222fff3a0f28af7700c907947506c4c0e4d279cc31"} Dec 05 15:57:43 crc kubenswrapper[4840]: I1205 15:57:43.200645 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.246554506 podStartE2EDuration="3.200624299s" podCreationTimestamp="2025-12-05 15:57:40 +0000 UTC" firstStartedPulling="2025-12-05 15:57:41.369306663 +0000 UTC m=+3539.710369277" lastFinishedPulling="2025-12-05 15:57:42.323376466 +0000 UTC m=+3540.664439070" observedRunningTime="2025-12-05 15:57:43.198370075 +0000 UTC m=+3541.539432709" watchObservedRunningTime="2025-12-05 15:57:43.200624299 +0000 UTC m=+3541.541686913" Dec 05 15:57:43 crc kubenswrapper[4840]: I1205 15:57:43.641448 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-b9qgp"] Dec 05 15:57:43 crc kubenswrapper[4840]: I1205 15:57:43.643832 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-b9qgp" Dec 05 15:57:43 crc kubenswrapper[4840]: I1205 15:57:43.656678 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-b9qgp"] Dec 05 15:57:43 crc kubenswrapper[4840]: I1205 15:57:43.798433 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntz49\" (UniqueName: \"kubernetes.io/projected/794e9698-e300-48fb-84db-fc55d3cdd2cf-kube-api-access-ntz49\") pod \"certified-operators-b9qgp\" (UID: \"794e9698-e300-48fb-84db-fc55d3cdd2cf\") " pod="openshift-marketplace/certified-operators-b9qgp" Dec 05 15:57:43 crc kubenswrapper[4840]: I1205 15:57:43.798576 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/794e9698-e300-48fb-84db-fc55d3cdd2cf-catalog-content\") pod \"certified-operators-b9qgp\" (UID: \"794e9698-e300-48fb-84db-fc55d3cdd2cf\") " pod="openshift-marketplace/certified-operators-b9qgp" Dec 05 15:57:43 crc kubenswrapper[4840]: I1205 15:57:43.798646 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/794e9698-e300-48fb-84db-fc55d3cdd2cf-utilities\") pod \"certified-operators-b9qgp\" (UID: \"794e9698-e300-48fb-84db-fc55d3cdd2cf\") " pod="openshift-marketplace/certified-operators-b9qgp" Dec 05 15:57:43 crc kubenswrapper[4840]: I1205 15:57:43.899835 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/794e9698-e300-48fb-84db-fc55d3cdd2cf-catalog-content\") pod \"certified-operators-b9qgp\" (UID: \"794e9698-e300-48fb-84db-fc55d3cdd2cf\") " pod="openshift-marketplace/certified-operators-b9qgp" Dec 05 15:57:43 crc kubenswrapper[4840]: I1205 15:57:43.900027 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/794e9698-e300-48fb-84db-fc55d3cdd2cf-utilities\") pod \"certified-operators-b9qgp\" (UID: \"794e9698-e300-48fb-84db-fc55d3cdd2cf\") " pod="openshift-marketplace/certified-operators-b9qgp" Dec 05 15:57:43 crc kubenswrapper[4840]: I1205 15:57:43.900064 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntz49\" (UniqueName: \"kubernetes.io/projected/794e9698-e300-48fb-84db-fc55d3cdd2cf-kube-api-access-ntz49\") pod \"certified-operators-b9qgp\" (UID: \"794e9698-e300-48fb-84db-fc55d3cdd2cf\") " pod="openshift-marketplace/certified-operators-b9qgp" Dec 05 15:57:43 crc kubenswrapper[4840]: I1205 15:57:43.900376 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/794e9698-e300-48fb-84db-fc55d3cdd2cf-catalog-content\") pod \"certified-operators-b9qgp\" (UID: \"794e9698-e300-48fb-84db-fc55d3cdd2cf\") " pod="openshift-marketplace/certified-operators-b9qgp" Dec 05 15:57:43 crc kubenswrapper[4840]: I1205 15:57:43.901039 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/794e9698-e300-48fb-84db-fc55d3cdd2cf-utilities\") pod \"certified-operators-b9qgp\" (UID: \"794e9698-e300-48fb-84db-fc55d3cdd2cf\") " pod="openshift-marketplace/certified-operators-b9qgp" Dec 05 15:57:43 crc kubenswrapper[4840]: I1205 15:57:43.924997 4840 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-ntz49\" (UniqueName: \"kubernetes.io/projected/794e9698-e300-48fb-84db-fc55d3cdd2cf-kube-api-access-ntz49\") pod \"certified-operators-b9qgp\" (UID: \"794e9698-e300-48fb-84db-fc55d3cdd2cf\") " pod="openshift-marketplace/certified-operators-b9qgp" Dec 05 15:57:43 crc kubenswrapper[4840]: I1205 15:57:43.963961 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-b9qgp" Dec 05 15:57:44 crc kubenswrapper[4840]: I1205 15:57:44.507416 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-b9qgp"] Dec 05 15:57:44 crc kubenswrapper[4840]: W1205 15:57:44.510303 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod794e9698_e300_48fb_84db_fc55d3cdd2cf.slice/crio-6838c94d98023706907be07f93cda221d2ac88f5090c7196bcf1c03495cf192a WatchSource:0}: Error finding container 6838c94d98023706907be07f93cda221d2ac88f5090c7196bcf1c03495cf192a: Status 404 returned error can't find the container with id 6838c94d98023706907be07f93cda221d2ac88f5090c7196bcf1c03495cf192a Dec 05 15:57:45 crc kubenswrapper[4840]: I1205 15:57:45.230844 4840 generic.go:334] "Generic (PLEG): container finished" podID="794e9698-e300-48fb-84db-fc55d3cdd2cf" containerID="52f37bf5c62278d2b1195a2a9e093950378fcc382cbfcfdd6039d265354c64ad" exitCode=0 Dec 05 15:57:45 crc kubenswrapper[4840]: I1205 15:57:45.231016 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b9qgp" event={"ID":"794e9698-e300-48fb-84db-fc55d3cdd2cf","Type":"ContainerDied","Data":"52f37bf5c62278d2b1195a2a9e093950378fcc382cbfcfdd6039d265354c64ad"} Dec 05 15:57:45 crc kubenswrapper[4840]: I1205 15:57:45.231536 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b9qgp" event={"ID":"794e9698-e300-48fb-84db-fc55d3cdd2cf","Type":"ContainerStarted","Data":"6838c94d98023706907be07f93cda221d2ac88f5090c7196bcf1c03495cf192a"} Dec 05 15:57:46 crc kubenswrapper[4840]: I1205 15:57:46.248961 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b9qgp" event={"ID":"794e9698-e300-48fb-84db-fc55d3cdd2cf","Type":"ContainerStarted","Data":"17ef95c1e08826ab0182c4e4e9acad7b5e0d5896eee2c0ce26c9ec248876652f"} Dec 05 15:57:47 crc kubenswrapper[4840]: I1205 15:57:47.259369 4840 generic.go:334] "Generic (PLEG): container finished" podID="794e9698-e300-48fb-84db-fc55d3cdd2cf" containerID="17ef95c1e08826ab0182c4e4e9acad7b5e0d5896eee2c0ce26c9ec248876652f" exitCode=0 Dec 05 15:57:47 crc kubenswrapper[4840]: I1205 15:57:47.259450 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b9qgp" event={"ID":"794e9698-e300-48fb-84db-fc55d3cdd2cf","Type":"ContainerDied","Data":"17ef95c1e08826ab0182c4e4e9acad7b5e0d5896eee2c0ce26c9ec248876652f"} Dec 05 15:57:48 crc kubenswrapper[4840]: I1205 15:57:48.269938 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b9qgp" event={"ID":"794e9698-e300-48fb-84db-fc55d3cdd2cf","Type":"ContainerStarted","Data":"a21ebe4e06774cf36cf75fc6133057d85358008c008d8602e777fe4392bc60f9"} Dec 05 15:57:48 crc kubenswrapper[4840]: I1205 15:57:48.289746 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-b9qgp" 
podStartSLOduration=2.791585329 podStartE2EDuration="5.289731115s" podCreationTimestamp="2025-12-05 15:57:43 +0000 UTC" firstStartedPulling="2025-12-05 15:57:45.233982036 +0000 UTC m=+3543.575044690" lastFinishedPulling="2025-12-05 15:57:47.732127842 +0000 UTC m=+3546.073190476" observedRunningTime="2025-12-05 15:57:48.289049056 +0000 UTC m=+3546.630111670" watchObservedRunningTime="2025-12-05 15:57:48.289731115 +0000 UTC m=+3546.630793729" Dec 05 15:57:53 crc kubenswrapper[4840]: I1205 15:57:53.964447 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-b9qgp" Dec 05 15:57:53 crc kubenswrapper[4840]: I1205 15:57:53.965110 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-b9qgp" Dec 05 15:57:54 crc kubenswrapper[4840]: I1205 15:57:54.018971 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-b9qgp" Dec 05 15:57:54 crc kubenswrapper[4840]: I1205 15:57:54.372239 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-b9qgp" Dec 05 15:57:54 crc kubenswrapper[4840]: I1205 15:57:54.418333 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-b9qgp"] Dec 05 15:57:56 crc kubenswrapper[4840]: I1205 15:57:56.352395 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-b9qgp" podUID="794e9698-e300-48fb-84db-fc55d3cdd2cf" containerName="registry-server" containerID="cri-o://a21ebe4e06774cf36cf75fc6133057d85358008c008d8602e777fe4392bc60f9" gracePeriod=2 Dec 05 15:57:57 crc kubenswrapper[4840]: I1205 15:57:57.361912 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-b9qgp" Dec 05 15:57:57 crc kubenswrapper[4840]: I1205 15:57:57.364825 4840 generic.go:334] "Generic (PLEG): container finished" podID="794e9698-e300-48fb-84db-fc55d3cdd2cf" containerID="a21ebe4e06774cf36cf75fc6133057d85358008c008d8602e777fe4392bc60f9" exitCode=0 Dec 05 15:57:57 crc kubenswrapper[4840]: I1205 15:57:57.364883 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b9qgp" event={"ID":"794e9698-e300-48fb-84db-fc55d3cdd2cf","Type":"ContainerDied","Data":"a21ebe4e06774cf36cf75fc6133057d85358008c008d8602e777fe4392bc60f9"} Dec 05 15:57:57 crc kubenswrapper[4840]: I1205 15:57:57.364921 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b9qgp" event={"ID":"794e9698-e300-48fb-84db-fc55d3cdd2cf","Type":"ContainerDied","Data":"6838c94d98023706907be07f93cda221d2ac88f5090c7196bcf1c03495cf192a"} Dec 05 15:57:57 crc kubenswrapper[4840]: I1205 15:57:57.364945 4840 scope.go:117] "RemoveContainer" containerID="a21ebe4e06774cf36cf75fc6133057d85358008c008d8602e777fe4392bc60f9" Dec 05 15:57:57 crc kubenswrapper[4840]: I1205 15:57:57.392309 4840 scope.go:117] "RemoveContainer" containerID="17ef95c1e08826ab0182c4e4e9acad7b5e0d5896eee2c0ce26c9ec248876652f" Dec 05 15:57:57 crc kubenswrapper[4840]: I1205 15:57:57.436512 4840 scope.go:117] "RemoveContainer" containerID="52f37bf5c62278d2b1195a2a9e093950378fcc382cbfcfdd6039d265354c64ad" Dec 05 15:57:57 crc kubenswrapper[4840]: I1205 15:57:57.468389 4840 scope.go:117] "RemoveContainer" containerID="a21ebe4e06774cf36cf75fc6133057d85358008c008d8602e777fe4392bc60f9" Dec 05 15:57:57 crc kubenswrapper[4840]: E1205 15:57:57.469209 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a21ebe4e06774cf36cf75fc6133057d85358008c008d8602e777fe4392bc60f9\": container with ID starting with a21ebe4e06774cf36cf75fc6133057d85358008c008d8602e777fe4392bc60f9 not found: ID does not exist" containerID="a21ebe4e06774cf36cf75fc6133057d85358008c008d8602e777fe4392bc60f9" Dec 05 15:57:57 crc kubenswrapper[4840]: I1205 15:57:57.469285 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a21ebe4e06774cf36cf75fc6133057d85358008c008d8602e777fe4392bc60f9"} err="failed to get container status \"a21ebe4e06774cf36cf75fc6133057d85358008c008d8602e777fe4392bc60f9\": rpc error: code = NotFound desc = could not find container \"a21ebe4e06774cf36cf75fc6133057d85358008c008d8602e777fe4392bc60f9\": container with ID starting with a21ebe4e06774cf36cf75fc6133057d85358008c008d8602e777fe4392bc60f9 not found: ID does not exist" Dec 05 15:57:57 crc kubenswrapper[4840]: I1205 15:57:57.469331 4840 scope.go:117] "RemoveContainer" containerID="17ef95c1e08826ab0182c4e4e9acad7b5e0d5896eee2c0ce26c9ec248876652f" Dec 05 15:57:57 crc kubenswrapper[4840]: E1205 15:57:57.469950 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17ef95c1e08826ab0182c4e4e9acad7b5e0d5896eee2c0ce26c9ec248876652f\": container with ID starting with 17ef95c1e08826ab0182c4e4e9acad7b5e0d5896eee2c0ce26c9ec248876652f not found: ID does not exist" containerID="17ef95c1e08826ab0182c4e4e9acad7b5e0d5896eee2c0ce26c9ec248876652f" Dec 05 15:57:57 crc kubenswrapper[4840]: I1205 15:57:57.470006 4840 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"17ef95c1e08826ab0182c4e4e9acad7b5e0d5896eee2c0ce26c9ec248876652f"} err="failed to get container status \"17ef95c1e08826ab0182c4e4e9acad7b5e0d5896eee2c0ce26c9ec248876652f\": rpc error: code = NotFound desc = could not find container \"17ef95c1e08826ab0182c4e4e9acad7b5e0d5896eee2c0ce26c9ec248876652f\": container with ID starting with 17ef95c1e08826ab0182c4e4e9acad7b5e0d5896eee2c0ce26c9ec248876652f not found: ID does not exist" Dec 05 15:57:57 crc kubenswrapper[4840]: I1205 15:57:57.470041 4840 scope.go:117] "RemoveContainer" containerID="52f37bf5c62278d2b1195a2a9e093950378fcc382cbfcfdd6039d265354c64ad" Dec 05 15:57:57 crc kubenswrapper[4840]: E1205 15:57:57.470434 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52f37bf5c62278d2b1195a2a9e093950378fcc382cbfcfdd6039d265354c64ad\": container with ID starting with 52f37bf5c62278d2b1195a2a9e093950378fcc382cbfcfdd6039d265354c64ad not found: ID does not exist" containerID="52f37bf5c62278d2b1195a2a9e093950378fcc382cbfcfdd6039d265354c64ad" Dec 05 15:57:57 crc kubenswrapper[4840]: I1205 15:57:57.470633 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52f37bf5c62278d2b1195a2a9e093950378fcc382cbfcfdd6039d265354c64ad"} err="failed to get container status \"52f37bf5c62278d2b1195a2a9e093950378fcc382cbfcfdd6039d265354c64ad\": rpc error: code = NotFound desc = could not find container \"52f37bf5c62278d2b1195a2a9e093950378fcc382cbfcfdd6039d265354c64ad\": container with ID starting with 52f37bf5c62278d2b1195a2a9e093950378fcc382cbfcfdd6039d265354c64ad not found: ID does not exist" Dec 05 15:57:57 crc kubenswrapper[4840]: I1205 15:57:57.522970 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/794e9698-e300-48fb-84db-fc55d3cdd2cf-utilities\") pod \"794e9698-e300-48fb-84db-fc55d3cdd2cf\" (UID: \"794e9698-e300-48fb-84db-fc55d3cdd2cf\") " Dec 05 15:57:57 crc kubenswrapper[4840]: I1205 15:57:57.523129 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/794e9698-e300-48fb-84db-fc55d3cdd2cf-catalog-content\") pod \"794e9698-e300-48fb-84db-fc55d3cdd2cf\" (UID: \"794e9698-e300-48fb-84db-fc55d3cdd2cf\") " Dec 05 15:57:57 crc kubenswrapper[4840]: I1205 15:57:57.523209 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ntz49\" (UniqueName: \"kubernetes.io/projected/794e9698-e300-48fb-84db-fc55d3cdd2cf-kube-api-access-ntz49\") pod \"794e9698-e300-48fb-84db-fc55d3cdd2cf\" (UID: \"794e9698-e300-48fb-84db-fc55d3cdd2cf\") " Dec 05 15:57:57 crc kubenswrapper[4840]: I1205 15:57:57.525145 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/794e9698-e300-48fb-84db-fc55d3cdd2cf-utilities" (OuterVolumeSpecName: "utilities") pod "794e9698-e300-48fb-84db-fc55d3cdd2cf" (UID: "794e9698-e300-48fb-84db-fc55d3cdd2cf"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:57:57 crc kubenswrapper[4840]: I1205 15:57:57.530394 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/794e9698-e300-48fb-84db-fc55d3cdd2cf-kube-api-access-ntz49" (OuterVolumeSpecName: "kube-api-access-ntz49") pod "794e9698-e300-48fb-84db-fc55d3cdd2cf" (UID: "794e9698-e300-48fb-84db-fc55d3cdd2cf"). InnerVolumeSpecName "kube-api-access-ntz49". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:57:57 crc kubenswrapper[4840]: I1205 15:57:57.571955 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/794e9698-e300-48fb-84db-fc55d3cdd2cf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "794e9698-e300-48fb-84db-fc55d3cdd2cf" (UID: "794e9698-e300-48fb-84db-fc55d3cdd2cf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 15:57:57 crc kubenswrapper[4840]: I1205 15:57:57.624962 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/794e9698-e300-48fb-84db-fc55d3cdd2cf-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 15:57:57 crc kubenswrapper[4840]: I1205 15:57:57.625002 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ntz49\" (UniqueName: \"kubernetes.io/projected/794e9698-e300-48fb-84db-fc55d3cdd2cf-kube-api-access-ntz49\") on node \"crc\" DevicePath \"\"" Dec 05 15:57:57 crc kubenswrapper[4840]: I1205 15:57:57.625016 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/794e9698-e300-48fb-84db-fc55d3cdd2cf-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 15:57:58 crc kubenswrapper[4840]: I1205 15:57:58.374503 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-b9qgp" Dec 05 15:57:58 crc kubenswrapper[4840]: I1205 15:57:58.395427 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-b9qgp"] Dec 05 15:57:58 crc kubenswrapper[4840]: I1205 15:57:58.402687 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-b9qgp"] Dec 05 15:58:00 crc kubenswrapper[4840]: I1205 15:58:00.077769 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="794e9698-e300-48fb-84db-fc55d3cdd2cf" path="/var/lib/kubelet/pods/794e9698-e300-48fb-84db-fc55d3cdd2cf/volumes" Dec 05 15:58:05 crc kubenswrapper[4840]: I1205 15:58:05.746592 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-mv5m4/must-gather-8gqpk"] Dec 05 15:58:05 crc kubenswrapper[4840]: E1205 15:58:05.747541 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="794e9698-e300-48fb-84db-fc55d3cdd2cf" containerName="registry-server" Dec 05 15:58:05 crc kubenswrapper[4840]: I1205 15:58:05.747558 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="794e9698-e300-48fb-84db-fc55d3cdd2cf" containerName="registry-server" Dec 05 15:58:05 crc kubenswrapper[4840]: E1205 15:58:05.747582 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="794e9698-e300-48fb-84db-fc55d3cdd2cf" containerName="extract-content" Dec 05 15:58:05 crc kubenswrapper[4840]: I1205 15:58:05.747591 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="794e9698-e300-48fb-84db-fc55d3cdd2cf" containerName="extract-content" Dec 05 15:58:05 crc kubenswrapper[4840]: E1205 15:58:05.747616 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="794e9698-e300-48fb-84db-fc55d3cdd2cf" containerName="extract-utilities" Dec 05 15:58:05 crc kubenswrapper[4840]: I1205 15:58:05.747624 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="794e9698-e300-48fb-84db-fc55d3cdd2cf" containerName="extract-utilities" Dec 05 15:58:05 crc kubenswrapper[4840]: I1205 15:58:05.747881 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="794e9698-e300-48fb-84db-fc55d3cdd2cf" containerName="registry-server" Dec 05 15:58:05 crc kubenswrapper[4840]: I1205 15:58:05.749114 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mv5m4/must-gather-8gqpk" Dec 05 15:58:05 crc kubenswrapper[4840]: I1205 15:58:05.753058 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-mv5m4"/"default-dockercfg-gltb5" Dec 05 15:58:05 crc kubenswrapper[4840]: I1205 15:58:05.753335 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-mv5m4"/"kube-root-ca.crt" Dec 05 15:58:05 crc kubenswrapper[4840]: I1205 15:58:05.753572 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-mv5m4"/"openshift-service-ca.crt" Dec 05 15:58:05 crc kubenswrapper[4840]: I1205 15:58:05.758258 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-mv5m4/must-gather-8gqpk"] Dec 05 15:58:05 crc kubenswrapper[4840]: I1205 15:58:05.916575 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/60370ced-19ef-46f7-bbee-9766d1bcda64-must-gather-output\") pod \"must-gather-8gqpk\" (UID: \"60370ced-19ef-46f7-bbee-9766d1bcda64\") " pod="openshift-must-gather-mv5m4/must-gather-8gqpk" Dec 05 15:58:05 crc kubenswrapper[4840]: I1205 15:58:05.916646 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bp8v\" (UniqueName: \"kubernetes.io/projected/60370ced-19ef-46f7-bbee-9766d1bcda64-kube-api-access-9bp8v\") pod \"must-gather-8gqpk\" (UID: \"60370ced-19ef-46f7-bbee-9766d1bcda64\") " pod="openshift-must-gather-mv5m4/must-gather-8gqpk" Dec 05 15:58:06 crc kubenswrapper[4840]: I1205 15:58:06.018954 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/60370ced-19ef-46f7-bbee-9766d1bcda64-must-gather-output\") pod \"must-gather-8gqpk\" (UID: \"60370ced-19ef-46f7-bbee-9766d1bcda64\") " pod="openshift-must-gather-mv5m4/must-gather-8gqpk" Dec 05 15:58:06 crc kubenswrapper[4840]: I1205 15:58:06.019013 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bp8v\" (UniqueName: \"kubernetes.io/projected/60370ced-19ef-46f7-bbee-9766d1bcda64-kube-api-access-9bp8v\") pod \"must-gather-8gqpk\" (UID: \"60370ced-19ef-46f7-bbee-9766d1bcda64\") " pod="openshift-must-gather-mv5m4/must-gather-8gqpk" Dec 05 15:58:06 crc kubenswrapper[4840]: I1205 15:58:06.019571 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/60370ced-19ef-46f7-bbee-9766d1bcda64-must-gather-output\") pod \"must-gather-8gqpk\" (UID: \"60370ced-19ef-46f7-bbee-9766d1bcda64\") " pod="openshift-must-gather-mv5m4/must-gather-8gqpk" Dec 05 15:58:06 crc kubenswrapper[4840]: I1205 15:58:06.039066 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bp8v\" (UniqueName: \"kubernetes.io/projected/60370ced-19ef-46f7-bbee-9766d1bcda64-kube-api-access-9bp8v\") pod \"must-gather-8gqpk\" (UID: \"60370ced-19ef-46f7-bbee-9766d1bcda64\") " pod="openshift-must-gather-mv5m4/must-gather-8gqpk" Dec 05 15:58:06 crc kubenswrapper[4840]: I1205 15:58:06.065787 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mv5m4/must-gather-8gqpk" Dec 05 15:58:06 crc kubenswrapper[4840]: I1205 15:58:06.520222 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-mv5m4/must-gather-8gqpk"] Dec 05 15:58:07 crc kubenswrapper[4840]: I1205 15:58:07.468521 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mv5m4/must-gather-8gqpk" event={"ID":"60370ced-19ef-46f7-bbee-9766d1bcda64","Type":"ContainerStarted","Data":"0e876f5a1010be5dee1e5447aa8680b8724f4c8b90b701c71cad6a817afb9b42"} Dec 05 15:58:11 crc kubenswrapper[4840]: I1205 15:58:11.510459 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mv5m4/must-gather-8gqpk" event={"ID":"60370ced-19ef-46f7-bbee-9766d1bcda64","Type":"ContainerStarted","Data":"51335d43b71109b7b3096628d1776223d549b23e1c66f3d211e9debb0223bf3f"} Dec 05 15:58:11 crc kubenswrapper[4840]: I1205 15:58:11.511003 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mv5m4/must-gather-8gqpk" event={"ID":"60370ced-19ef-46f7-bbee-9766d1bcda64","Type":"ContainerStarted","Data":"3a9e278faaf7e5b15f4c9ce5d4459d193157ec69236072b270bc278ca0615954"} Dec 05 15:58:11 crc kubenswrapper[4840]: I1205 15:58:11.528340 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-mv5m4/must-gather-8gqpk" podStartSLOduration=2.690501125 podStartE2EDuration="6.528320416s" podCreationTimestamp="2025-12-05 15:58:05 +0000 UTC" firstStartedPulling="2025-12-05 15:58:06.531629331 +0000 UTC m=+3564.872691945" lastFinishedPulling="2025-12-05 15:58:10.369448632 +0000 UTC m=+3568.710511236" observedRunningTime="2025-12-05 15:58:11.523978283 +0000 UTC m=+3569.865040887" watchObservedRunningTime="2025-12-05 15:58:11.528320416 +0000 UTC m=+3569.869383030" Dec 05 15:58:14 crc kubenswrapper[4840]: I1205 15:58:14.387121 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-mv5m4/crc-debug-lwdk2"] Dec 05 15:58:14 crc kubenswrapper[4840]: I1205 15:58:14.389271 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mv5m4/crc-debug-lwdk2" Dec 05 15:58:14 crc kubenswrapper[4840]: I1205 15:58:14.446239 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdk7g\" (UniqueName: \"kubernetes.io/projected/4781c483-6190-458c-9a96-b2d65f285dab-kube-api-access-wdk7g\") pod \"crc-debug-lwdk2\" (UID: \"4781c483-6190-458c-9a96-b2d65f285dab\") " pod="openshift-must-gather-mv5m4/crc-debug-lwdk2" Dec 05 15:58:14 crc kubenswrapper[4840]: I1205 15:58:14.446616 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4781c483-6190-458c-9a96-b2d65f285dab-host\") pod \"crc-debug-lwdk2\" (UID: \"4781c483-6190-458c-9a96-b2d65f285dab\") " pod="openshift-must-gather-mv5m4/crc-debug-lwdk2" Dec 05 15:58:14 crc kubenswrapper[4840]: I1205 15:58:14.548635 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdk7g\" (UniqueName: \"kubernetes.io/projected/4781c483-6190-458c-9a96-b2d65f285dab-kube-api-access-wdk7g\") pod \"crc-debug-lwdk2\" (UID: \"4781c483-6190-458c-9a96-b2d65f285dab\") " pod="openshift-must-gather-mv5m4/crc-debug-lwdk2" Dec 05 15:58:14 crc kubenswrapper[4840]: I1205 15:58:14.548765 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4781c483-6190-458c-9a96-b2d65f285dab-host\") pod \"crc-debug-lwdk2\" (UID: \"4781c483-6190-458c-9a96-b2d65f285dab\") " pod="openshift-must-gather-mv5m4/crc-debug-lwdk2" Dec 05 15:58:14 crc kubenswrapper[4840]: I1205 15:58:14.548952 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4781c483-6190-458c-9a96-b2d65f285dab-host\") pod \"crc-debug-lwdk2\" (UID: \"4781c483-6190-458c-9a96-b2d65f285dab\") " pod="openshift-must-gather-mv5m4/crc-debug-lwdk2" Dec 05 15:58:14 crc kubenswrapper[4840]: I1205 15:58:14.587819 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdk7g\" (UniqueName: \"kubernetes.io/projected/4781c483-6190-458c-9a96-b2d65f285dab-kube-api-access-wdk7g\") pod \"crc-debug-lwdk2\" (UID: \"4781c483-6190-458c-9a96-b2d65f285dab\") " pod="openshift-must-gather-mv5m4/crc-debug-lwdk2" Dec 05 15:58:14 crc kubenswrapper[4840]: I1205 15:58:14.708387 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mv5m4/crc-debug-lwdk2" Dec 05 15:58:14 crc kubenswrapper[4840]: W1205 15:58:14.769158 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4781c483_6190_458c_9a96_b2d65f285dab.slice/crio-cd817f2634d95c72c8c3329a5b2b0d39f34cc19b110f1c5705abc2181f682ebd WatchSource:0}: Error finding container cd817f2634d95c72c8c3329a5b2b0d39f34cc19b110f1c5705abc2181f682ebd: Status 404 returned error can't find the container with id cd817f2634d95c72c8c3329a5b2b0d39f34cc19b110f1c5705abc2181f682ebd Dec 05 15:58:15 crc kubenswrapper[4840]: I1205 15:58:15.552369 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mv5m4/crc-debug-lwdk2" event={"ID":"4781c483-6190-458c-9a96-b2d65f285dab","Type":"ContainerStarted","Data":"cd817f2634d95c72c8c3329a5b2b0d39f34cc19b110f1c5705abc2181f682ebd"} Dec 05 15:58:19 crc kubenswrapper[4840]: I1205 15:58:19.471577 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:58:19 crc kubenswrapper[4840]: I1205 15:58:19.472148 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:58:25 crc kubenswrapper[4840]: I1205 15:58:25.689194 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mv5m4/crc-debug-lwdk2" event={"ID":"4781c483-6190-458c-9a96-b2d65f285dab","Type":"ContainerStarted","Data":"7e2255b73bb8114a1985516eda145a94f1edcc23f51fa9b3d59b662338428a2e"} Dec 05 15:58:25 crc kubenswrapper[4840]: I1205 15:58:25.711536 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-mv5m4/crc-debug-lwdk2" podStartSLOduration=1.289549794 podStartE2EDuration="11.711516294s" podCreationTimestamp="2025-12-05 15:58:14 +0000 UTC" firstStartedPulling="2025-12-05 15:58:14.770953938 +0000 UTC m=+3573.112016552" lastFinishedPulling="2025-12-05 15:58:25.192920438 +0000 UTC m=+3583.533983052" observedRunningTime="2025-12-05 15:58:25.709698972 +0000 UTC m=+3584.050761586" watchObservedRunningTime="2025-12-05 15:58:25.711516294 +0000 UTC m=+3584.052578908" Dec 05 15:58:49 crc kubenswrapper[4840]: I1205 15:58:49.472679 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:58:49 crc kubenswrapper[4840]: I1205 15:58:49.473374 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:59:11 crc kubenswrapper[4840]: I1205 15:59:11.119610 4840 generic.go:334] "Generic (PLEG): container finished" podID="4781c483-6190-458c-9a96-b2d65f285dab" 
containerID="7e2255b73bb8114a1985516eda145a94f1edcc23f51fa9b3d59b662338428a2e" exitCode=0 Dec 05 15:59:11 crc kubenswrapper[4840]: I1205 15:59:11.119704 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mv5m4/crc-debug-lwdk2" event={"ID":"4781c483-6190-458c-9a96-b2d65f285dab","Type":"ContainerDied","Data":"7e2255b73bb8114a1985516eda145a94f1edcc23f51fa9b3d59b662338428a2e"} Dec 05 15:59:12 crc kubenswrapper[4840]: I1205 15:59:12.244016 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mv5m4/crc-debug-lwdk2" Dec 05 15:59:12 crc kubenswrapper[4840]: I1205 15:59:12.302337 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-mv5m4/crc-debug-lwdk2"] Dec 05 15:59:12 crc kubenswrapper[4840]: I1205 15:59:12.311133 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-mv5m4/crc-debug-lwdk2"] Dec 05 15:59:12 crc kubenswrapper[4840]: I1205 15:59:12.328584 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4781c483-6190-458c-9a96-b2d65f285dab-host\") pod \"4781c483-6190-458c-9a96-b2d65f285dab\" (UID: \"4781c483-6190-458c-9a96-b2d65f285dab\") " Dec 05 15:59:12 crc kubenswrapper[4840]: I1205 15:59:12.328710 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4781c483-6190-458c-9a96-b2d65f285dab-host" (OuterVolumeSpecName: "host") pod "4781c483-6190-458c-9a96-b2d65f285dab" (UID: "4781c483-6190-458c-9a96-b2d65f285dab"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:59:12 crc kubenswrapper[4840]: I1205 15:59:12.328835 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wdk7g\" (UniqueName: \"kubernetes.io/projected/4781c483-6190-458c-9a96-b2d65f285dab-kube-api-access-wdk7g\") pod \"4781c483-6190-458c-9a96-b2d65f285dab\" (UID: \"4781c483-6190-458c-9a96-b2d65f285dab\") " Dec 05 15:59:12 crc kubenswrapper[4840]: I1205 15:59:12.329398 4840 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4781c483-6190-458c-9a96-b2d65f285dab-host\") on node \"crc\" DevicePath \"\"" Dec 05 15:59:12 crc kubenswrapper[4840]: I1205 15:59:12.333947 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4781c483-6190-458c-9a96-b2d65f285dab-kube-api-access-wdk7g" (OuterVolumeSpecName: "kube-api-access-wdk7g") pod "4781c483-6190-458c-9a96-b2d65f285dab" (UID: "4781c483-6190-458c-9a96-b2d65f285dab"). InnerVolumeSpecName "kube-api-access-wdk7g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:59:12 crc kubenswrapper[4840]: I1205 15:59:12.430958 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wdk7g\" (UniqueName: \"kubernetes.io/projected/4781c483-6190-458c-9a96-b2d65f285dab-kube-api-access-wdk7g\") on node \"crc\" DevicePath \"\"" Dec 05 15:59:13 crc kubenswrapper[4840]: I1205 15:59:13.139060 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd817f2634d95c72c8c3329a5b2b0d39f34cc19b110f1c5705abc2181f682ebd" Dec 05 15:59:13 crc kubenswrapper[4840]: I1205 15:59:13.139142 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mv5m4/crc-debug-lwdk2" Dec 05 15:59:13 crc kubenswrapper[4840]: I1205 15:59:13.459187 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-mv5m4/crc-debug-kjt7t"] Dec 05 15:59:13 crc kubenswrapper[4840]: E1205 15:59:13.459755 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4781c483-6190-458c-9a96-b2d65f285dab" containerName="container-00" Dec 05 15:59:13 crc kubenswrapper[4840]: I1205 15:59:13.459771 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="4781c483-6190-458c-9a96-b2d65f285dab" containerName="container-00" Dec 05 15:59:13 crc kubenswrapper[4840]: I1205 15:59:13.459962 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="4781c483-6190-458c-9a96-b2d65f285dab" containerName="container-00" Dec 05 15:59:13 crc kubenswrapper[4840]: I1205 15:59:13.460573 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mv5m4/crc-debug-kjt7t" Dec 05 15:59:13 crc kubenswrapper[4840]: I1205 15:59:13.550363 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/35db1a70-9498-4e4e-a876-e1635e528b15-host\") pod \"crc-debug-kjt7t\" (UID: \"35db1a70-9498-4e4e-a876-e1635e528b15\") " pod="openshift-must-gather-mv5m4/crc-debug-kjt7t" Dec 05 15:59:13 crc kubenswrapper[4840]: I1205 15:59:13.550749 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnfh9\" (UniqueName: \"kubernetes.io/projected/35db1a70-9498-4e4e-a876-e1635e528b15-kube-api-access-qnfh9\") pod \"crc-debug-kjt7t\" (UID: \"35db1a70-9498-4e4e-a876-e1635e528b15\") " pod="openshift-must-gather-mv5m4/crc-debug-kjt7t" Dec 05 15:59:13 crc kubenswrapper[4840]: I1205 15:59:13.652321 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/35db1a70-9498-4e4e-a876-e1635e528b15-host\") pod \"crc-debug-kjt7t\" (UID: \"35db1a70-9498-4e4e-a876-e1635e528b15\") " pod="openshift-must-gather-mv5m4/crc-debug-kjt7t" Dec 05 15:59:13 crc kubenswrapper[4840]: I1205 15:59:13.652427 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnfh9\" (UniqueName: \"kubernetes.io/projected/35db1a70-9498-4e4e-a876-e1635e528b15-kube-api-access-qnfh9\") pod \"crc-debug-kjt7t\" (UID: \"35db1a70-9498-4e4e-a876-e1635e528b15\") " pod="openshift-must-gather-mv5m4/crc-debug-kjt7t" Dec 05 15:59:13 crc kubenswrapper[4840]: I1205 15:59:13.652447 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/35db1a70-9498-4e4e-a876-e1635e528b15-host\") pod \"crc-debug-kjt7t\" (UID: \"35db1a70-9498-4e4e-a876-e1635e528b15\") " pod="openshift-must-gather-mv5m4/crc-debug-kjt7t" Dec 05 15:59:13 crc kubenswrapper[4840]: I1205 15:59:13.674824 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnfh9\" (UniqueName: \"kubernetes.io/projected/35db1a70-9498-4e4e-a876-e1635e528b15-kube-api-access-qnfh9\") pod \"crc-debug-kjt7t\" (UID: \"35db1a70-9498-4e4e-a876-e1635e528b15\") " pod="openshift-must-gather-mv5m4/crc-debug-kjt7t" Dec 05 15:59:13 crc kubenswrapper[4840]: I1205 15:59:13.781007 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mv5m4/crc-debug-kjt7t" Dec 05 15:59:13 crc kubenswrapper[4840]: W1205 15:59:13.819257 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35db1a70_9498_4e4e_a876_e1635e528b15.slice/crio-10cee0a6f118121b056f176f69be616dd829e8600d4df656c06e17d099ef04ef WatchSource:0}: Error finding container 10cee0a6f118121b056f176f69be616dd829e8600d4df656c06e17d099ef04ef: Status 404 returned error can't find the container with id 10cee0a6f118121b056f176f69be616dd829e8600d4df656c06e17d099ef04ef Dec 05 15:59:14 crc kubenswrapper[4840]: I1205 15:59:14.079303 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4781c483-6190-458c-9a96-b2d65f285dab" path="/var/lib/kubelet/pods/4781c483-6190-458c-9a96-b2d65f285dab/volumes" Dec 05 15:59:14 crc kubenswrapper[4840]: I1205 15:59:14.187912 4840 generic.go:334] "Generic (PLEG): container finished" podID="35db1a70-9498-4e4e-a876-e1635e528b15" containerID="e631bbc6af486b58542659a1b820b46cb66643cb5b68c4c07d904df28cd5da9a" exitCode=0 Dec 05 15:59:14 crc kubenswrapper[4840]: I1205 15:59:14.187961 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mv5m4/crc-debug-kjt7t" event={"ID":"35db1a70-9498-4e4e-a876-e1635e528b15","Type":"ContainerDied","Data":"e631bbc6af486b58542659a1b820b46cb66643cb5b68c4c07d904df28cd5da9a"} Dec 05 15:59:14 crc kubenswrapper[4840]: I1205 15:59:14.187989 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mv5m4/crc-debug-kjt7t" event={"ID":"35db1a70-9498-4e4e-a876-e1635e528b15","Type":"ContainerStarted","Data":"10cee0a6f118121b056f176f69be616dd829e8600d4df656c06e17d099ef04ef"} Dec 05 15:59:14 crc kubenswrapper[4840]: I1205 15:59:14.699166 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-mv5m4/crc-debug-kjt7t"] Dec 05 15:59:14 crc kubenswrapper[4840]: I1205 15:59:14.707783 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-mv5m4/crc-debug-kjt7t"] Dec 05 15:59:15 crc kubenswrapper[4840]: I1205 15:59:15.288768 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mv5m4/crc-debug-kjt7t" Dec 05 15:59:15 crc kubenswrapper[4840]: I1205 15:59:15.382576 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnfh9\" (UniqueName: \"kubernetes.io/projected/35db1a70-9498-4e4e-a876-e1635e528b15-kube-api-access-qnfh9\") pod \"35db1a70-9498-4e4e-a876-e1635e528b15\" (UID: \"35db1a70-9498-4e4e-a876-e1635e528b15\") " Dec 05 15:59:15 crc kubenswrapper[4840]: I1205 15:59:15.382963 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/35db1a70-9498-4e4e-a876-e1635e528b15-host\") pod \"35db1a70-9498-4e4e-a876-e1635e528b15\" (UID: \"35db1a70-9498-4e4e-a876-e1635e528b15\") " Dec 05 15:59:15 crc kubenswrapper[4840]: I1205 15:59:15.383054 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/35db1a70-9498-4e4e-a876-e1635e528b15-host" (OuterVolumeSpecName: "host") pod "35db1a70-9498-4e4e-a876-e1635e528b15" (UID: "35db1a70-9498-4e4e-a876-e1635e528b15"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:59:15 crc kubenswrapper[4840]: I1205 15:59:15.383759 4840 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/35db1a70-9498-4e4e-a876-e1635e528b15-host\") on node \"crc\" DevicePath \"\"" Dec 05 15:59:15 crc kubenswrapper[4840]: I1205 15:59:15.388947 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35db1a70-9498-4e4e-a876-e1635e528b15-kube-api-access-qnfh9" (OuterVolumeSpecName: "kube-api-access-qnfh9") pod "35db1a70-9498-4e4e-a876-e1635e528b15" (UID: "35db1a70-9498-4e4e-a876-e1635e528b15"). InnerVolumeSpecName "kube-api-access-qnfh9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:59:15 crc kubenswrapper[4840]: I1205 15:59:15.485235 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnfh9\" (UniqueName: \"kubernetes.io/projected/35db1a70-9498-4e4e-a876-e1635e528b15-kube-api-access-qnfh9\") on node \"crc\" DevicePath \"\"" Dec 05 15:59:15 crc kubenswrapper[4840]: I1205 15:59:15.890029 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-mv5m4/crc-debug-whxls"] Dec 05 15:59:15 crc kubenswrapper[4840]: E1205 15:59:15.890496 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35db1a70-9498-4e4e-a876-e1635e528b15" containerName="container-00" Dec 05 15:59:15 crc kubenswrapper[4840]: I1205 15:59:15.890513 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="35db1a70-9498-4e4e-a876-e1635e528b15" containerName="container-00" Dec 05 15:59:15 crc kubenswrapper[4840]: I1205 15:59:15.890749 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="35db1a70-9498-4e4e-a876-e1635e528b15" containerName="container-00" Dec 05 15:59:15 crc kubenswrapper[4840]: I1205 15:59:15.891542 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mv5m4/crc-debug-whxls" Dec 05 15:59:15 crc kubenswrapper[4840]: I1205 15:59:15.994355 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nslh5\" (UniqueName: \"kubernetes.io/projected/7a458bb1-104f-44ab-b686-03aa2e67ee5f-kube-api-access-nslh5\") pod \"crc-debug-whxls\" (UID: \"7a458bb1-104f-44ab-b686-03aa2e67ee5f\") " pod="openshift-must-gather-mv5m4/crc-debug-whxls" Dec 05 15:59:15 crc kubenswrapper[4840]: I1205 15:59:15.994507 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a458bb1-104f-44ab-b686-03aa2e67ee5f-host\") pod \"crc-debug-whxls\" (UID: \"7a458bb1-104f-44ab-b686-03aa2e67ee5f\") " pod="openshift-must-gather-mv5m4/crc-debug-whxls" Dec 05 15:59:16 crc kubenswrapper[4840]: I1205 15:59:16.077969 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35db1a70-9498-4e4e-a876-e1635e528b15" path="/var/lib/kubelet/pods/35db1a70-9498-4e4e-a876-e1635e528b15/volumes" Dec 05 15:59:16 crc kubenswrapper[4840]: I1205 15:59:16.096612 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nslh5\" (UniqueName: \"kubernetes.io/projected/7a458bb1-104f-44ab-b686-03aa2e67ee5f-kube-api-access-nslh5\") pod \"crc-debug-whxls\" (UID: \"7a458bb1-104f-44ab-b686-03aa2e67ee5f\") " pod="openshift-must-gather-mv5m4/crc-debug-whxls" Dec 05 15:59:16 crc kubenswrapper[4840]: I1205 15:59:16.097057 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a458bb1-104f-44ab-b686-03aa2e67ee5f-host\") pod \"crc-debug-whxls\" (UID: \"7a458bb1-104f-44ab-b686-03aa2e67ee5f\") " pod="openshift-must-gather-mv5m4/crc-debug-whxls" Dec 05 15:59:16 crc kubenswrapper[4840]: I1205 15:59:16.097195 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a458bb1-104f-44ab-b686-03aa2e67ee5f-host\") pod \"crc-debug-whxls\" (UID: \"7a458bb1-104f-44ab-b686-03aa2e67ee5f\") " pod="openshift-must-gather-mv5m4/crc-debug-whxls" Dec 05 15:59:16 crc kubenswrapper[4840]: I1205 15:59:16.115797 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nslh5\" (UniqueName: \"kubernetes.io/projected/7a458bb1-104f-44ab-b686-03aa2e67ee5f-kube-api-access-nslh5\") pod \"crc-debug-whxls\" (UID: \"7a458bb1-104f-44ab-b686-03aa2e67ee5f\") " pod="openshift-must-gather-mv5m4/crc-debug-whxls" Dec 05 15:59:16 crc kubenswrapper[4840]: I1205 15:59:16.205500 4840 scope.go:117] "RemoveContainer" containerID="e631bbc6af486b58542659a1b820b46cb66643cb5b68c4c07d904df28cd5da9a" Dec 05 15:59:16 crc kubenswrapper[4840]: I1205 15:59:16.205546 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mv5m4/crc-debug-kjt7t" Dec 05 15:59:16 crc kubenswrapper[4840]: I1205 15:59:16.212216 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mv5m4/crc-debug-whxls" Dec 05 15:59:16 crc kubenswrapper[4840]: W1205 15:59:16.259176 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a458bb1_104f_44ab_b686_03aa2e67ee5f.slice/crio-3dee11d39dc4ca83721452d29f41f4d2516e2934f331570961388f1484c9b302 WatchSource:0}: Error finding container 3dee11d39dc4ca83721452d29f41f4d2516e2934f331570961388f1484c9b302: Status 404 returned error can't find the container with id 3dee11d39dc4ca83721452d29f41f4d2516e2934f331570961388f1484c9b302 Dec 05 15:59:17 crc kubenswrapper[4840]: I1205 15:59:17.217353 4840 generic.go:334] "Generic (PLEG): container finished" podID="7a458bb1-104f-44ab-b686-03aa2e67ee5f" containerID="18c3b922e6a69f91dd576f4b852024c95c4d4bbc92bea42ab5d98400ed85cc04" exitCode=0 Dec 05 15:59:17 crc kubenswrapper[4840]: I1205 15:59:17.217432 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mv5m4/crc-debug-whxls" event={"ID":"7a458bb1-104f-44ab-b686-03aa2e67ee5f","Type":"ContainerDied","Data":"18c3b922e6a69f91dd576f4b852024c95c4d4bbc92bea42ab5d98400ed85cc04"} Dec 05 15:59:17 crc kubenswrapper[4840]: I1205 15:59:17.217674 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mv5m4/crc-debug-whxls" event={"ID":"7a458bb1-104f-44ab-b686-03aa2e67ee5f","Type":"ContainerStarted","Data":"3dee11d39dc4ca83721452d29f41f4d2516e2934f331570961388f1484c9b302"} Dec 05 15:59:17 crc kubenswrapper[4840]: I1205 15:59:17.256045 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-mv5m4/crc-debug-whxls"] Dec 05 15:59:17 crc kubenswrapper[4840]: I1205 15:59:17.263304 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-mv5m4/crc-debug-whxls"] Dec 05 15:59:18 crc kubenswrapper[4840]: I1205 15:59:18.394919 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mv5m4/crc-debug-whxls" Dec 05 15:59:18 crc kubenswrapper[4840]: I1205 15:59:18.542075 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a458bb1-104f-44ab-b686-03aa2e67ee5f-host\") pod \"7a458bb1-104f-44ab-b686-03aa2e67ee5f\" (UID: \"7a458bb1-104f-44ab-b686-03aa2e67ee5f\") " Dec 05 15:59:18 crc kubenswrapper[4840]: I1205 15:59:18.542190 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nslh5\" (UniqueName: \"kubernetes.io/projected/7a458bb1-104f-44ab-b686-03aa2e67ee5f-kube-api-access-nslh5\") pod \"7a458bb1-104f-44ab-b686-03aa2e67ee5f\" (UID: \"7a458bb1-104f-44ab-b686-03aa2e67ee5f\") " Dec 05 15:59:18 crc kubenswrapper[4840]: I1205 15:59:18.542180 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7a458bb1-104f-44ab-b686-03aa2e67ee5f-host" (OuterVolumeSpecName: "host") pod "7a458bb1-104f-44ab-b686-03aa2e67ee5f" (UID: "7a458bb1-104f-44ab-b686-03aa2e67ee5f"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 15:59:18 crc kubenswrapper[4840]: I1205 15:59:18.542655 4840 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a458bb1-104f-44ab-b686-03aa2e67ee5f-host\") on node \"crc\" DevicePath \"\"" Dec 05 15:59:18 crc kubenswrapper[4840]: I1205 15:59:18.548474 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a458bb1-104f-44ab-b686-03aa2e67ee5f-kube-api-access-nslh5" (OuterVolumeSpecName: "kube-api-access-nslh5") pod "7a458bb1-104f-44ab-b686-03aa2e67ee5f" (UID: "7a458bb1-104f-44ab-b686-03aa2e67ee5f"). InnerVolumeSpecName "kube-api-access-nslh5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 15:59:18 crc kubenswrapper[4840]: I1205 15:59:18.644057 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nslh5\" (UniqueName: \"kubernetes.io/projected/7a458bb1-104f-44ab-b686-03aa2e67ee5f-kube-api-access-nslh5\") on node \"crc\" DevicePath \"\"" Dec 05 15:59:19 crc kubenswrapper[4840]: I1205 15:59:19.283585 4840 scope.go:117] "RemoveContainer" containerID="18c3b922e6a69f91dd576f4b852024c95c4d4bbc92bea42ab5d98400ed85cc04" Dec 05 15:59:19 crc kubenswrapper[4840]: I1205 15:59:19.283612 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mv5m4/crc-debug-whxls" Dec 05 15:59:19 crc kubenswrapper[4840]: I1205 15:59:19.471825 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 15:59:19 crc kubenswrapper[4840]: I1205 15:59:19.471901 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 15:59:19 crc kubenswrapper[4840]: I1205 15:59:19.471952 4840 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" Dec 05 15:59:19 crc kubenswrapper[4840]: I1205 15:59:19.472664 4840 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96"} pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 15:59:19 crc kubenswrapper[4840]: I1205 15:59:19.472732 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" containerID="cri-o://4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96" gracePeriod=600 Dec 05 15:59:19 crc kubenswrapper[4840]: E1205 15:59:19.609134 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:59:20 crc kubenswrapper[4840]: I1205 15:59:20.079425 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a458bb1-104f-44ab-b686-03aa2e67ee5f" path="/var/lib/kubelet/pods/7a458bb1-104f-44ab-b686-03aa2e67ee5f/volumes" Dec 05 15:59:20 crc kubenswrapper[4840]: I1205 15:59:20.299138 4840 generic.go:334] "Generic (PLEG): container finished" podID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96" exitCode=0 Dec 05 15:59:20 crc kubenswrapper[4840]: I1205 15:59:20.299178 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerDied","Data":"4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96"} Dec 05 15:59:20 crc kubenswrapper[4840]: I1205 15:59:20.299211 4840 scope.go:117] "RemoveContainer" containerID="2b478102a94947c649d5d899c2c867efcfa2b258c8381dcbdcfbcf9f4ef1907a" Dec 05 15:59:20 crc kubenswrapper[4840]: I1205 15:59:20.299908 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96" Dec 05 15:59:20 crc kubenswrapper[4840]: E1205 15:59:20.300259 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:59:31 crc kubenswrapper[4840]: I1205 15:59:31.067407 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96" Dec 05 15:59:31 crc kubenswrapper[4840]: E1205 15:59:31.068132 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:59:33 crc kubenswrapper[4840]: I1205 15:59:33.203179 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-897596894-64sgb_9eb8c11f-7d8a-4330-804c-e9fa74cd10e7/barbican-api/0.log" Dec 05 15:59:33 crc kubenswrapper[4840]: I1205 15:59:33.290837 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-897596894-64sgb_9eb8c11f-7d8a-4330-804c-e9fa74cd10e7/barbican-api-log/0.log" Dec 05 15:59:33 crc kubenswrapper[4840]: I1205 15:59:33.427744 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-57cfd5878-qwpfg_7002d387-4756-4e68-b238-6e9cbf1d9b10/barbican-keystone-listener/0.log" Dec 05 15:59:33 crc kubenswrapper[4840]: I1205 15:59:33.505531 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-57cfd5878-qwpfg_7002d387-4756-4e68-b238-6e9cbf1d9b10/barbican-keystone-listener-log/0.log" Dec 05 
15:59:33 crc kubenswrapper[4840]: I1205 15:59:33.566035 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-67bdc7dbc-8r6xf_9670574b-79d6-495e-abc8-123bf1582742/barbican-worker/0.log" Dec 05 15:59:33 crc kubenswrapper[4840]: I1205 15:59:33.657423 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-67bdc7dbc-8r6xf_9670574b-79d6-495e-abc8-123bf1582742/barbican-worker-log/0.log" Dec 05 15:59:33 crc kubenswrapper[4840]: I1205 15:59:33.774273 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt_3298a054-72de-4060-95c4-ff42a8ed3a7f/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 15:59:33 crc kubenswrapper[4840]: I1205 15:59:33.897608 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_7d48a78c-f903-4b87-8c99-622c71bac6aa/ceilometer-central-agent/0.log" Dec 05 15:59:33 crc kubenswrapper[4840]: I1205 15:59:33.977502 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_7d48a78c-f903-4b87-8c99-622c71bac6aa/ceilometer-notification-agent/0.log" Dec 05 15:59:34 crc kubenswrapper[4840]: I1205 15:59:34.010278 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_7d48a78c-f903-4b87-8c99-622c71bac6aa/proxy-httpd/0.log" Dec 05 15:59:34 crc kubenswrapper[4840]: I1205 15:59:34.091218 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_7d48a78c-f903-4b87-8c99-622c71bac6aa/sg-core/0.log" Dec 05 15:59:34 crc kubenswrapper[4840]: I1205 15:59:34.234974 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_b2379ec8-f983-42df-9255-2a97b8589b6d/cinder-api/0.log" Dec 05 15:59:34 crc kubenswrapper[4840]: I1205 15:59:34.240499 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_b2379ec8-f983-42df-9255-2a97b8589b6d/cinder-api-log/0.log" Dec 05 15:59:34 crc kubenswrapper[4840]: I1205 15:59:34.442041 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_bd99a832-4ebb-49a0-88ae-89f2b247da8a/cinder-scheduler/0.log" Dec 05 15:59:34 crc kubenswrapper[4840]: I1205 15:59:34.484556 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_bd99a832-4ebb-49a0-88ae-89f2b247da8a/probe/0.log" Dec 05 15:59:34 crc kubenswrapper[4840]: I1205 15:59:34.575578 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-jftg5_997f1855-be81-4a43-94c8-2f6001a12c0d/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 15:59:34 crc kubenswrapper[4840]: I1205 15:59:34.716244 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx_23c1423f-c01b-4a22-b2de-63e6a8646eed/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 15:59:34 crc kubenswrapper[4840]: I1205 15:59:34.825448 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-8c6f6df99-dkpw9_0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe/init/0.log" Dec 05 15:59:34 crc kubenswrapper[4840]: I1205 15:59:34.956901 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-8c6f6df99-dkpw9_0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe/init/0.log" Dec 05 15:59:35 crc kubenswrapper[4840]: I1205 15:59:35.001895 4840 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_dnsmasq-dns-8c6f6df99-dkpw9_0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe/dnsmasq-dns/0.log" Dec 05 15:59:35 crc kubenswrapper[4840]: I1205 15:59:35.052987 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-tjftf_51bf8de7-4ac8-4478-af9d-7b438f6afb1c/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 15:59:35 crc kubenswrapper[4840]: I1205 15:59:35.238263 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_b8843aac-3856-4c2d-80d4-d3f642065c75/glance-httpd/0.log" Dec 05 15:59:35 crc kubenswrapper[4840]: I1205 15:59:35.256712 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_b8843aac-3856-4c2d-80d4-d3f642065c75/glance-log/0.log" Dec 05 15:59:35 crc kubenswrapper[4840]: I1205 15:59:35.441854 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_6326423c-2a7f-4f3c-b361-de370bd51817/glance-httpd/0.log" Dec 05 15:59:35 crc kubenswrapper[4840]: I1205 15:59:35.519682 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_6326423c-2a7f-4f3c-b361-de370bd51817/glance-log/0.log" Dec 05 15:59:35 crc kubenswrapper[4840]: I1205 15:59:35.636031 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7b99464548-lx7k9_d384faa1-3b3b-45f8-bf4b-902236ec40da/horizon/0.log" Dec 05 15:59:35 crc kubenswrapper[4840]: I1205 15:59:35.813367 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-97vkv_d89744d6-5d83-4152-8a17-ab5bddf86ad9/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 15:59:35 crc kubenswrapper[4840]: I1205 15:59:35.961319 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7b99464548-lx7k9_d384faa1-3b3b-45f8-bf4b-902236ec40da/horizon-log/0.log" Dec 05 15:59:36 crc kubenswrapper[4840]: I1205 15:59:36.028562 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-t788v_30a7c1b0-8c3f-48e7-be82-bc57e708cd5e/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 15:59:36 crc kubenswrapper[4840]: I1205 15:59:36.245579 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_b4cc279e-ffd3-45a6-94cb-e787194bb137/kube-state-metrics/0.log" Dec 05 15:59:36 crc kubenswrapper[4840]: I1205 15:59:36.331184 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-7b75bf6fbf-wj8dt_c98e01c4-b177-45c9-9f0b-bd02f90fe5d2/keystone-api/0.log" Dec 05 15:59:36 crc kubenswrapper[4840]: I1205 15:59:36.434570 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk_453e239f-2acb-42cb-a617-35975fb5437a/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 15:59:36 crc kubenswrapper[4840]: I1205 15:59:36.966704 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6d557dcbfc-24qsg_3f464dff-9cae-4492-9e99-7d0343ecefbe/neutron-httpd/0.log" Dec 05 15:59:36 crc kubenswrapper[4840]: I1205 15:59:36.981151 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6d557dcbfc-24qsg_3f464dff-9cae-4492-9e99-7d0343ecefbe/neutron-api/0.log" Dec 05 15:59:37 crc kubenswrapper[4840]: I1205 15:59:37.132588 4840 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp_e9a9f7dd-12db-447d-a9d9-f279b5f72f5b/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 15:59:37 crc kubenswrapper[4840]: I1205 15:59:37.586343 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_4d40873e-c669-464c-8c3d-bf5d60c99e62/nova-api-log/0.log" Dec 05 15:59:37 crc kubenswrapper[4840]: I1205 15:59:37.601006 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_5c921bef-fe53-4e64-bf34-1faa504c8a15/nova-cell0-conductor-conductor/0.log" Dec 05 15:59:37 crc kubenswrapper[4840]: I1205 15:59:37.798555 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_4d40873e-c669-464c-8c3d-bf5d60c99e62/nova-api-api/0.log" Dec 05 15:59:37 crc kubenswrapper[4840]: I1205 15:59:37.883929 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_ef118f1d-aaf7-48be-b9aa-ec84d23ea999/nova-cell1-conductor-conductor/0.log" Dec 05 15:59:37 crc kubenswrapper[4840]: I1205 15:59:37.995521 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_c5bca6a7-9654-492d-9687-e7672c18117f/nova-cell1-novncproxy-novncproxy/0.log" Dec 05 15:59:38 crc kubenswrapper[4840]: I1205 15:59:38.180570 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-4c2f4_c64b6a7d-6e39-40f8-8837-660f386a357e/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 15:59:38 crc kubenswrapper[4840]: I1205 15:59:38.290502 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_0853ab99-c6ae-4ea4-8aa3-119437720120/nova-metadata-log/0.log" Dec 05 15:59:38 crc kubenswrapper[4840]: I1205 15:59:38.524850 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_863cd7d3-d2a4-44eb-88c8-c3cd9259cb78/nova-scheduler-scheduler/0.log" Dec 05 15:59:38 crc kubenswrapper[4840]: I1205 15:59:38.624351 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_28fec705-baad-4e89-94d4-e1e7d64579a1/mysql-bootstrap/0.log" Dec 05 15:59:38 crc kubenswrapper[4840]: I1205 15:59:38.847679 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_28fec705-baad-4e89-94d4-e1e7d64579a1/galera/0.log" Dec 05 15:59:38 crc kubenswrapper[4840]: I1205 15:59:38.874805 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_28fec705-baad-4e89-94d4-e1e7d64579a1/mysql-bootstrap/0.log" Dec 05 15:59:39 crc kubenswrapper[4840]: I1205 15:59:39.075739 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_b1a586cf-ff08-4975-b172-0167bb10ff77/mysql-bootstrap/0.log" Dec 05 15:59:39 crc kubenswrapper[4840]: I1205 15:59:39.249050 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_b1a586cf-ff08-4975-b172-0167bb10ff77/mysql-bootstrap/0.log" Dec 05 15:59:39 crc kubenswrapper[4840]: I1205 15:59:39.260469 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_b1a586cf-ff08-4975-b172-0167bb10ff77/galera/0.log" Dec 05 15:59:39 crc kubenswrapper[4840]: I1205 15:59:39.341276 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_0853ab99-c6ae-4ea4-8aa3-119437720120/nova-metadata-metadata/0.log" Dec 05 15:59:39 crc 
kubenswrapper[4840]: I1205 15:59:39.446984 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_304fce22-b828-4844-9db0-13120847afc1/openstackclient/0.log" Dec 05 15:59:39 crc kubenswrapper[4840]: I1205 15:59:39.585495 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-cttvn_ab69b71e-d666-46a7-a896-96a70fff685a/ovn-controller/0.log" Dec 05 15:59:39 crc kubenswrapper[4840]: I1205 15:59:39.768660 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-kbwmc_3548faf3-ee23-449d-b44c-5858d2cdc9ec/openstack-network-exporter/0.log" Dec 05 15:59:39 crc kubenswrapper[4840]: I1205 15:59:39.775516 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rnf5z_f75e819c-db65-4ab4-8530-1390b8a83dd0/ovsdb-server-init/0.log" Dec 05 15:59:40 crc kubenswrapper[4840]: I1205 15:59:40.002299 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rnf5z_f75e819c-db65-4ab4-8530-1390b8a83dd0/ovs-vswitchd/0.log" Dec 05 15:59:40 crc kubenswrapper[4840]: I1205 15:59:40.008461 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rnf5z_f75e819c-db65-4ab4-8530-1390b8a83dd0/ovsdb-server-init/0.log" Dec 05 15:59:40 crc kubenswrapper[4840]: I1205 15:59:40.109324 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rnf5z_f75e819c-db65-4ab4-8530-1390b8a83dd0/ovsdb-server/0.log" Dec 05 15:59:40 crc kubenswrapper[4840]: I1205 15:59:40.426212 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-sclgc_d0c216f7-ce43-4852-9788-e1f5e5705ec4/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 15:59:40 crc kubenswrapper[4840]: I1205 15:59:40.482069 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_1457a36a-acaa-42e9-b5ea-7667c272d25d/ovn-northd/0.log" Dec 05 15:59:40 crc kubenswrapper[4840]: I1205 15:59:40.504555 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_1457a36a-acaa-42e9-b5ea-7667c272d25d/openstack-network-exporter/0.log" Dec 05 15:59:40 crc kubenswrapper[4840]: I1205 15:59:40.716273 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_0fdc90f7-3261-4c8f-860b-c5f3890d3470/openstack-network-exporter/0.log" Dec 05 15:59:40 crc kubenswrapper[4840]: I1205 15:59:40.726083 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_0fdc90f7-3261-4c8f-860b-c5f3890d3470/ovsdbserver-nb/0.log" Dec 05 15:59:40 crc kubenswrapper[4840]: I1205 15:59:40.947743 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_7548669d-ea2b-4442-b4b6-f3408d636798/openstack-network-exporter/0.log" Dec 05 15:59:40 crc kubenswrapper[4840]: I1205 15:59:40.954640 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_7548669d-ea2b-4442-b4b6-f3408d636798/ovsdbserver-sb/0.log" Dec 05 15:59:41 crc kubenswrapper[4840]: I1205 15:59:41.085269 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5d878656b-vljqr_dc5ac202-1b33-4a65-aab2-d5fe6e62f844/placement-api/0.log" Dec 05 15:59:41 crc kubenswrapper[4840]: I1205 15:59:41.208961 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c0b4037e-9bd6-4a53-84b3-941d72023ce3/setup-container/0.log" Dec 05 
15:59:41 crc kubenswrapper[4840]: I1205 15:59:41.264438 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5d878656b-vljqr_dc5ac202-1b33-4a65-aab2-d5fe6e62f844/placement-log/0.log" Dec 05 15:59:41 crc kubenswrapper[4840]: I1205 15:59:41.476556 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c0b4037e-9bd6-4a53-84b3-941d72023ce3/setup-container/0.log" Dec 05 15:59:41 crc kubenswrapper[4840]: I1205 15:59:41.518232 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_4b53cdac-e8cf-4dc5-abed-0d20e7ca8140/setup-container/0.log" Dec 05 15:59:41 crc kubenswrapper[4840]: I1205 15:59:41.524104 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c0b4037e-9bd6-4a53-84b3-941d72023ce3/rabbitmq/0.log" Dec 05 15:59:41 crc kubenswrapper[4840]: I1205 15:59:41.837249 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_4b53cdac-e8cf-4dc5-abed-0d20e7ca8140/setup-container/0.log" Dec 05 15:59:41 crc kubenswrapper[4840]: I1205 15:59:41.880175 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-96xww_b0e3c935-42f8-456b-8870-a4ca2f9fce1d/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 15:59:41 crc kubenswrapper[4840]: I1205 15:59:41.887763 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_4b53cdac-e8cf-4dc5-abed-0d20e7ca8140/rabbitmq/0.log" Dec 05 15:59:42 crc kubenswrapper[4840]: I1205 15:59:42.027783 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-4m26l_740d3f73-d31d-4a95-9830-ed5545f8525a/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 15:59:42 crc kubenswrapper[4840]: I1205 15:59:42.078926 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96" Dec 05 15:59:42 crc kubenswrapper[4840]: E1205 15:59:42.079177 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 15:59:42 crc kubenswrapper[4840]: I1205 15:59:42.126178 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7_50a15428-e663-42af-a044-01daa7f04c93/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 15:59:42 crc kubenswrapper[4840]: I1205 15:59:42.321383 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-thbrf_54e23b4a-dfba-45da-9197-cfbdcbd4ccfe/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 15:59:42 crc kubenswrapper[4840]: I1205 15:59:42.357959 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-8sfl9_0e16306a-b5d0-468b-b0a2-b19ad5af4592/ssh-known-hosts-edpm-deployment/0.log" Dec 05 15:59:42 crc kubenswrapper[4840]: I1205 15:59:42.557099 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-74d87df5bc-m4vp7_8217ba67-c10d-43b2-8e12-41c6c25aa2da/proxy-server/0.log" Dec 05 15:59:42 
crc kubenswrapper[4840]: I1205 15:59:42.621544 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-74d87df5bc-m4vp7_8217ba67-c10d-43b2-8e12-41c6c25aa2da/proxy-httpd/0.log" Dec 05 15:59:42 crc kubenswrapper[4840]: I1205 15:59:42.695534 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-b696t_45ba7f21-a1e8-4443-816f-91c5392f62df/swift-ring-rebalance/0.log" Dec 05 15:59:42 crc kubenswrapper[4840]: I1205 15:59:42.861002 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/account-auditor/0.log" Dec 05 15:59:42 crc kubenswrapper[4840]: I1205 15:59:42.976318 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/account-reaper/0.log" Dec 05 15:59:43 crc kubenswrapper[4840]: I1205 15:59:43.007780 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/account-replicator/0.log" Dec 05 15:59:43 crc kubenswrapper[4840]: I1205 15:59:43.028224 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/account-server/0.log" Dec 05 15:59:43 crc kubenswrapper[4840]: I1205 15:59:43.110512 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/container-auditor/0.log" Dec 05 15:59:43 crc kubenswrapper[4840]: I1205 15:59:43.212661 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/container-replicator/0.log" Dec 05 15:59:43 crc kubenswrapper[4840]: I1205 15:59:43.250323 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/container-server/0.log" Dec 05 15:59:43 crc kubenswrapper[4840]: I1205 15:59:43.287389 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/container-updater/0.log" Dec 05 15:59:43 crc kubenswrapper[4840]: I1205 15:59:43.386002 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/object-auditor/0.log" Dec 05 15:59:43 crc kubenswrapper[4840]: I1205 15:59:43.494139 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/object-expirer/0.log" Dec 05 15:59:43 crc kubenswrapper[4840]: I1205 15:59:43.503991 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/object-replicator/0.log" Dec 05 15:59:43 crc kubenswrapper[4840]: I1205 15:59:43.516393 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/object-server/0.log" Dec 05 15:59:43 crc kubenswrapper[4840]: I1205 15:59:43.639075 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/object-updater/0.log" Dec 05 15:59:43 crc kubenswrapper[4840]: I1205 15:59:43.698919 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/rsync/0.log" Dec 05 15:59:43 crc kubenswrapper[4840]: I1205 15:59:43.724260 4840 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/swift-recon-cron/0.log" Dec 05 15:59:43 crc kubenswrapper[4840]: I1205 15:59:43.897365 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-76pjd_d51bc3d7-3ce5-4967-ba22-71cef47d25d1/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 15:59:44 crc kubenswrapper[4840]: I1205 15:59:44.201459 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_d2e8a783-170e-44cb-a505-1ee2a96572af/tempest-tests-tempest-tests-runner/0.log" Dec 05 15:59:44 crc kubenswrapper[4840]: I1205 15:59:44.241986 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_f37d61a2-7a86-40a3-9394-0ca13367e28f/test-operator-logs-container/0.log" Dec 05 15:59:44 crc kubenswrapper[4840]: I1205 15:59:44.441441 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f_d55f8919-f3d7-4080-9573-b92529c9ec9f/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 15:59:53 crc kubenswrapper[4840]: I1205 15:59:53.022561 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_bd532161-0cce-4e82-b084-45e1569c1575/memcached/0.log" Dec 05 15:59:55 crc kubenswrapper[4840]: I1205 15:59:55.067016 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96" Dec 05 15:59:55 crc kubenswrapper[4840]: E1205 15:59:55.067517 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 16:00:00 crc kubenswrapper[4840]: I1205 16:00:00.161702 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415840-t2kmr"] Dec 05 16:00:00 crc kubenswrapper[4840]: E1205 16:00:00.163245 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a458bb1-104f-44ab-b686-03aa2e67ee5f" containerName="container-00" Dec 05 16:00:00 crc kubenswrapper[4840]: I1205 16:00:00.163283 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a458bb1-104f-44ab-b686-03aa2e67ee5f" containerName="container-00" Dec 05 16:00:00 crc kubenswrapper[4840]: I1205 16:00:00.163619 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a458bb1-104f-44ab-b686-03aa2e67ee5f" containerName="container-00" Dec 05 16:00:00 crc kubenswrapper[4840]: I1205 16:00:00.164768 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415840-t2kmr" Dec 05 16:00:00 crc kubenswrapper[4840]: I1205 16:00:00.247153 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 16:00:00 crc kubenswrapper[4840]: I1205 16:00:00.247401 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 16:00:00 crc kubenswrapper[4840]: I1205 16:00:00.256363 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415840-t2kmr"] Dec 05 16:00:00 crc kubenswrapper[4840]: I1205 16:00:00.360918 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf-secret-volume\") pod \"collect-profiles-29415840-t2kmr\" (UID: \"5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415840-t2kmr" Dec 05 16:00:00 crc kubenswrapper[4840]: I1205 16:00:00.360976 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kktx\" (UniqueName: \"kubernetes.io/projected/5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf-kube-api-access-2kktx\") pod \"collect-profiles-29415840-t2kmr\" (UID: \"5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415840-t2kmr" Dec 05 16:00:00 crc kubenswrapper[4840]: I1205 16:00:00.361041 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf-config-volume\") pod \"collect-profiles-29415840-t2kmr\" (UID: \"5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415840-t2kmr" Dec 05 16:00:00 crc kubenswrapper[4840]: I1205 16:00:00.463314 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf-secret-volume\") pod \"collect-profiles-29415840-t2kmr\" (UID: \"5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415840-t2kmr" Dec 05 16:00:00 crc kubenswrapper[4840]: I1205 16:00:00.463390 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kktx\" (UniqueName: \"kubernetes.io/projected/5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf-kube-api-access-2kktx\") pod \"collect-profiles-29415840-t2kmr\" (UID: \"5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415840-t2kmr" Dec 05 16:00:00 crc kubenswrapper[4840]: I1205 16:00:00.463484 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf-config-volume\") pod \"collect-profiles-29415840-t2kmr\" (UID: \"5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415840-t2kmr" Dec 05 16:00:00 crc kubenswrapper[4840]: I1205 16:00:00.464603 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf-config-volume\") pod 
\"collect-profiles-29415840-t2kmr\" (UID: \"5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415840-t2kmr" Dec 05 16:00:00 crc kubenswrapper[4840]: I1205 16:00:00.475770 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf-secret-volume\") pod \"collect-profiles-29415840-t2kmr\" (UID: \"5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415840-t2kmr" Dec 05 16:00:00 crc kubenswrapper[4840]: I1205 16:00:00.484005 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kktx\" (UniqueName: \"kubernetes.io/projected/5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf-kube-api-access-2kktx\") pod \"collect-profiles-29415840-t2kmr\" (UID: \"5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415840-t2kmr" Dec 05 16:00:00 crc kubenswrapper[4840]: I1205 16:00:00.588401 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415840-t2kmr" Dec 05 16:00:01 crc kubenswrapper[4840]: I1205 16:00:01.046826 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415840-t2kmr"] Dec 05 16:00:01 crc kubenswrapper[4840]: I1205 16:00:01.723314 4840 generic.go:334] "Generic (PLEG): container finished" podID="5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf" containerID="33782d99b3bf00be49c85136c014183695aba9d49a9eeb4a1b311fbdfc199eba" exitCode=0 Dec 05 16:00:01 crc kubenswrapper[4840]: I1205 16:00:01.723535 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415840-t2kmr" event={"ID":"5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf","Type":"ContainerDied","Data":"33782d99b3bf00be49c85136c014183695aba9d49a9eeb4a1b311fbdfc199eba"} Dec 05 16:00:01 crc kubenswrapper[4840]: I1205 16:00:01.723684 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415840-t2kmr" event={"ID":"5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf","Type":"ContainerStarted","Data":"0d1171754d71cd0788ebebf2f4b883a22858385d25d41d586929e6a61092041c"} Dec 05 16:00:03 crc kubenswrapper[4840]: I1205 16:00:03.159748 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415840-t2kmr" Dec 05 16:00:03 crc kubenswrapper[4840]: I1205 16:00:03.318572 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf-secret-volume\") pod \"5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf\" (UID: \"5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf\") " Dec 05 16:00:03 crc kubenswrapper[4840]: I1205 16:00:03.319727 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf-config-volume\") pod \"5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf\" (UID: \"5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf\") " Dec 05 16:00:03 crc kubenswrapper[4840]: I1205 16:00:03.319790 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2kktx\" (UniqueName: \"kubernetes.io/projected/5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf-kube-api-access-2kktx\") pod \"5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf\" (UID: \"5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf\") " Dec 05 16:00:03 crc kubenswrapper[4840]: I1205 16:00:03.320407 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf-config-volume" (OuterVolumeSpecName: "config-volume") pod "5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf" (UID: "5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 16:00:03 crc kubenswrapper[4840]: I1205 16:00:03.325249 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf" (UID: "5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 16:00:03 crc kubenswrapper[4840]: I1205 16:00:03.335532 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf-kube-api-access-2kktx" (OuterVolumeSpecName: "kube-api-access-2kktx") pod "5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf" (UID: "5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf"). InnerVolumeSpecName "kube-api-access-2kktx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 16:00:03 crc kubenswrapper[4840]: I1205 16:00:03.422024 4840 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 16:00:03 crc kubenswrapper[4840]: I1205 16:00:03.422062 4840 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 16:00:03 crc kubenswrapper[4840]: I1205 16:00:03.422076 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2kktx\" (UniqueName: \"kubernetes.io/projected/5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf-kube-api-access-2kktx\") on node \"crc\" DevicePath \"\"" Dec 05 16:00:03 crc kubenswrapper[4840]: I1205 16:00:03.742410 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415840-t2kmr" event={"ID":"5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf","Type":"ContainerDied","Data":"0d1171754d71cd0788ebebf2f4b883a22858385d25d41d586929e6a61092041c"} Dec 05 16:00:03 crc kubenswrapper[4840]: I1205 16:00:03.742673 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d1171754d71cd0788ebebf2f4b883a22858385d25d41d586929e6a61092041c" Dec 05 16:00:03 crc kubenswrapper[4840]: I1205 16:00:03.742550 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415840-t2kmr" Dec 05 16:00:04 crc kubenswrapper[4840]: I1205 16:00:04.233830 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415795-dxxp6"] Dec 05 16:00:04 crc kubenswrapper[4840]: I1205 16:00:04.294720 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415795-dxxp6"] Dec 05 16:00:06 crc kubenswrapper[4840]: I1205 16:00:06.084541 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b286c3cb-717a-4be7-aecf-f2eaa8732dfd" path="/var/lib/kubelet/pods/b286c3cb-717a-4be7-aecf-f2eaa8732dfd/volumes" Dec 05 16:00:09 crc kubenswrapper[4840]: I1205 16:00:09.556781 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf_7a20aa55-efd0-473a-9582-e7812dd599ab/util/0.log" Dec 05 16:00:09 crc kubenswrapper[4840]: I1205 16:00:09.749569 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf_7a20aa55-efd0-473a-9582-e7812dd599ab/pull/0.log" Dec 05 16:00:09 crc kubenswrapper[4840]: I1205 16:00:09.771512 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf_7a20aa55-efd0-473a-9582-e7812dd599ab/pull/0.log" Dec 05 16:00:09 crc kubenswrapper[4840]: I1205 16:00:09.821940 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf_7a20aa55-efd0-473a-9582-e7812dd599ab/util/0.log" Dec 05 16:00:09 crc kubenswrapper[4840]: I1205 16:00:09.950459 4840 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf_7a20aa55-efd0-473a-9582-e7812dd599ab/util/0.log" Dec 05 16:00:09 crc kubenswrapper[4840]: I1205 16:00:09.950846 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf_7a20aa55-efd0-473a-9582-e7812dd599ab/pull/0.log" Dec 05 16:00:09 crc kubenswrapper[4840]: I1205 16:00:09.956460 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf_7a20aa55-efd0-473a-9582-e7812dd599ab/extract/0.log" Dec 05 16:00:10 crc kubenswrapper[4840]: I1205 16:00:10.066984 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96" Dec 05 16:00:10 crc kubenswrapper[4840]: E1205 16:00:10.067404 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 16:00:10 crc kubenswrapper[4840]: I1205 16:00:10.147766 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-7zmt7_c79f46e0-5947-4b4a-b581-0e49736fb41f/kube-rbac-proxy/0.log" Dec 05 16:00:10 crc kubenswrapper[4840]: I1205 16:00:10.178326 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-7zmt7_c79f46e0-5947-4b4a-b581-0e49736fb41f/manager/0.log" Dec 05 16:00:10 crc kubenswrapper[4840]: I1205 16:00:10.224713 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-dcbzq_94defa3c-b83c-44b9-83c0-e92bdf7944be/kube-rbac-proxy/0.log" Dec 05 16:00:10 crc kubenswrapper[4840]: I1205 16:00:10.351924 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-dcbzq_94defa3c-b83c-44b9-83c0-e92bdf7944be/manager/0.log" Dec 05 16:00:10 crc kubenswrapper[4840]: I1205 16:00:10.388036 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-8vw7g_9f897b64-3aeb-44c6-a340-9e0082876e93/kube-rbac-proxy/0.log" Dec 05 16:00:10 crc kubenswrapper[4840]: I1205 16:00:10.454683 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-8vw7g_9f897b64-3aeb-44c6-a340-9e0082876e93/manager/0.log" Dec 05 16:00:10 crc kubenswrapper[4840]: I1205 16:00:10.599651 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-fwvd2_c15ac393-953d-45e9-b8dc-7212c6e2366b/kube-rbac-proxy/0.log" Dec 05 16:00:10 crc kubenswrapper[4840]: I1205 16:00:10.672551 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-fwvd2_c15ac393-953d-45e9-b8dc-7212c6e2366b/manager/0.log" Dec 05 16:00:10 crc kubenswrapper[4840]: I1205 16:00:10.735965 4840 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-qz4dx_546f9401-ad92-49f1-836a-8e240bbc2d61/kube-rbac-proxy/0.log" Dec 05 16:00:10 crc kubenswrapper[4840]: I1205 16:00:10.818488 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-qz4dx_546f9401-ad92-49f1-836a-8e240bbc2d61/manager/0.log" Dec 05 16:00:10 crc kubenswrapper[4840]: I1205 16:00:10.938459 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-fmbzp_ff2da961-d2c6-486f-87bf-2394ee00a5a1/kube-rbac-proxy/0.log" Dec 05 16:00:10 crc kubenswrapper[4840]: I1205 16:00:10.949050 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-fmbzp_ff2da961-d2c6-486f-87bf-2394ee00a5a1/manager/0.log" Dec 05 16:00:11 crc kubenswrapper[4840]: I1205 16:00:11.189230 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-gpcmz_8e7074a0-bae6-49e7-8915-c4cb3242108d/kube-rbac-proxy/0.log" Dec 05 16:00:11 crc kubenswrapper[4840]: I1205 16:00:11.382234 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-q2pp7_48a2075e-30bb-41fb-a311-fb9b593182c6/kube-rbac-proxy/0.log" Dec 05 16:00:11 crc kubenswrapper[4840]: I1205 16:00:11.391974 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-gpcmz_8e7074a0-bae6-49e7-8915-c4cb3242108d/manager/0.log" Dec 05 16:00:11 crc kubenswrapper[4840]: I1205 16:00:11.396257 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-q2pp7_48a2075e-30bb-41fb-a311-fb9b593182c6/manager/0.log" Dec 05 16:00:11 crc kubenswrapper[4840]: I1205 16:00:11.601012 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-zjbwl_6edb6d08-9885-457f-8642-ef77c64de97a/kube-rbac-proxy/0.log" Dec 05 16:00:11 crc kubenswrapper[4840]: I1205 16:00:11.617795 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-zjbwl_6edb6d08-9885-457f-8642-ef77c64de97a/manager/0.log" Dec 05 16:00:11 crc kubenswrapper[4840]: I1205 16:00:11.846118 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-n6chf_683c5938-459a-4c60-bb98-8237f6ddc4f6/kube-rbac-proxy/0.log" Dec 05 16:00:11 crc kubenswrapper[4840]: I1205 16:00:11.865442 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-n6chf_683c5938-459a-4c60-bb98-8237f6ddc4f6/manager/0.log" Dec 05 16:00:11 crc kubenswrapper[4840]: I1205 16:00:11.919761 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-hvc5g_3b263984-5a9b-45eb-886b-b8209ada6a7a/kube-rbac-proxy/0.log" Dec 05 16:00:12 crc kubenswrapper[4840]: I1205 16:00:12.042641 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-hvc5g_3b263984-5a9b-45eb-886b-b8209ada6a7a/manager/0.log" Dec 05 16:00:12 crc kubenswrapper[4840]: I1205 16:00:12.079270 4840 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-5v888_cdf9b744-368d-4c91-8ecf-6a5d983f3eb7/kube-rbac-proxy/0.log" Dec 05 16:00:12 crc kubenswrapper[4840]: I1205 16:00:12.147156 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-5v888_cdf9b744-368d-4c91-8ecf-6a5d983f3eb7/manager/0.log" Dec 05 16:00:12 crc kubenswrapper[4840]: I1205 16:00:12.240890 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-spqrr_ae41b596-75b2-46ab-b95a-ef7b41f1e66b/kube-rbac-proxy/0.log" Dec 05 16:00:12 crc kubenswrapper[4840]: I1205 16:00:12.368768 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-spqrr_ae41b596-75b2-46ab-b95a-ef7b41f1e66b/manager/0.log" Dec 05 16:00:12 crc kubenswrapper[4840]: I1205 16:00:12.464563 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-xgflv_6c27d60c-a8e1-4616-88b2-391876d4112d/kube-rbac-proxy/0.log" Dec 05 16:00:12 crc kubenswrapper[4840]: I1205 16:00:12.503306 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-xgflv_6c27d60c-a8e1-4616-88b2-391876d4112d/manager/0.log" Dec 05 16:00:12 crc kubenswrapper[4840]: I1205 16:00:12.592787 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt_a18cf365-d0a8-4750-b11e-12d608ceb0e9/kube-rbac-proxy/0.log" Dec 05 16:00:12 crc kubenswrapper[4840]: I1205 16:00:12.726704 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt_a18cf365-d0a8-4750-b11e-12d608ceb0e9/manager/0.log" Dec 05 16:00:13 crc kubenswrapper[4840]: I1205 16:00:13.141060 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-mvhfv_e3672e49-5b56-4666-8f46-f7846e65b4ba/registry-server/0.log" Dec 05 16:00:13 crc kubenswrapper[4840]: I1205 16:00:13.194673 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7f5df65589-88fmq_6251ea61-296a-4fe0-b2a2-c6de82a74d33/operator/0.log" Dec 05 16:00:13 crc kubenswrapper[4840]: I1205 16:00:13.385154 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-q5ksl_387ba01a-b726-4c0c-b3ab-160be43d9587/kube-rbac-proxy/0.log" Dec 05 16:00:13 crc kubenswrapper[4840]: I1205 16:00:13.530613 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-q5ksl_387ba01a-b726-4c0c-b3ab-160be43d9587/manager/0.log" Dec 05 16:00:13 crc kubenswrapper[4840]: I1205 16:00:13.612523 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-8jvnm_31eda6ed-1dee-4670-a6d3-22871423db53/kube-rbac-proxy/0.log" Dec 05 16:00:13 crc kubenswrapper[4840]: I1205 16:00:13.885806 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-8jvnm_31eda6ed-1dee-4670-a6d3-22871423db53/manager/0.log" Dec 05 16:00:13 crc kubenswrapper[4840]: I1205 16:00:13.981444 4840 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-r2br9_e1dca245-f390-4f32-8683-eea98ad3fb45/operator/0.log" Dec 05 16:00:14 crc kubenswrapper[4840]: I1205 16:00:14.026214 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-79cb7dcf7d-pw6j8_187efc3a-77ce-4898-89d9-5785491d5d29/manager/0.log" Dec 05 16:00:14 crc kubenswrapper[4840]: I1205 16:00:14.120019 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-fvrzr_919c5760-f9dd-4b40-9b91-ea3b11d13a26/kube-rbac-proxy/0.log" Dec 05 16:00:14 crc kubenswrapper[4840]: I1205 16:00:14.172192 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-fvrzr_919c5760-f9dd-4b40-9b91-ea3b11d13a26/manager/0.log" Dec 05 16:00:14 crc kubenswrapper[4840]: I1205 16:00:14.242171 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-spvpr_f3d60ad8-fd18-4cf1-9ac2-05ce61d52f08/kube-rbac-proxy/0.log" Dec 05 16:00:14 crc kubenswrapper[4840]: I1205 16:00:14.284354 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-spvpr_f3d60ad8-fd18-4cf1-9ac2-05ce61d52f08/manager/0.log" Dec 05 16:00:14 crc kubenswrapper[4840]: I1205 16:00:14.480777 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-g5t4m_df20a713-1c9f-4738-8401-ddff0dcf0c38/manager/0.log" Dec 05 16:00:14 crc kubenswrapper[4840]: I1205 16:00:14.489817 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-g5t4m_df20a713-1c9f-4738-8401-ddff0dcf0c38/kube-rbac-proxy/0.log" Dec 05 16:00:14 crc kubenswrapper[4840]: I1205 16:00:14.558243 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-chg27_53e9bbdb-0559-4053-b38c-395876f9d69f/kube-rbac-proxy/0.log" Dec 05 16:00:14 crc kubenswrapper[4840]: I1205 16:00:14.635508 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-chg27_53e9bbdb-0559-4053-b38c-395876f9d69f/manager/0.log" Dec 05 16:00:24 crc kubenswrapper[4840]: I1205 16:00:24.066455 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96" Dec 05 16:00:24 crc kubenswrapper[4840]: E1205 16:00:24.067268 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 16:00:25 crc kubenswrapper[4840]: I1205 16:00:25.205445 4840 scope.go:117] "RemoveContainer" containerID="dab6017c79880ed760c34ba5d16644e1fc182a492c9f84db6b506e36d7c609ca" Dec 05 16:00:32 crc kubenswrapper[4840]: I1205 16:00:32.243124 4840 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-x52gm_e281c913-e265-4ce8-af6a-11f255f6faf1/control-plane-machine-set-operator/0.log" Dec 05 16:00:32 crc kubenswrapper[4840]: I1205 16:00:32.407982 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9lj8m_891a06ac-8101-4fab-a947-2adf9d8eeb7f/kube-rbac-proxy/0.log" Dec 05 16:00:32 crc kubenswrapper[4840]: I1205 16:00:32.432811 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9lj8m_891a06ac-8101-4fab-a947-2adf9d8eeb7f/machine-api-operator/0.log" Dec 05 16:00:37 crc kubenswrapper[4840]: I1205 16:00:37.066912 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96" Dec 05 16:00:37 crc kubenswrapper[4840]: E1205 16:00:37.067744 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 16:00:44 crc kubenswrapper[4840]: I1205 16:00:44.029418 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-t8hxr_d94bca81-5539-4bc2-bbec-38a88770929d/cert-manager-controller/0.log" Dec 05 16:00:44 crc kubenswrapper[4840]: I1205 16:00:44.183089 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-n8sd9_7d96fedc-8d6a-4b34-af3e-58104249edc2/cert-manager-cainjector/0.log" Dec 05 16:00:44 crc kubenswrapper[4840]: I1205 16:00:44.256049 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-8nl6w_839aa773-117e-423c-853e-37f05ff674a1/cert-manager-webhook/0.log" Dec 05 16:00:50 crc kubenswrapper[4840]: I1205 16:00:50.067200 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96" Dec 05 16:00:50 crc kubenswrapper[4840]: E1205 16:00:50.068112 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 16:00:56 crc kubenswrapper[4840]: I1205 16:00:56.742857 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-nhx2t_b15f2765-949a-4e53-a48b-1e691b8b1b37/nmstate-console-plugin/0.log" Dec 05 16:00:56 crc kubenswrapper[4840]: I1205 16:00:56.936971 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-pcnxx_8b83469a-e169-45bb-b5b7-ef32a36719f2/nmstate-handler/0.log" Dec 05 16:00:57 crc kubenswrapper[4840]: I1205 16:00:57.036675 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-rldh8_1c6815a9-6a06-4ed1-81fb-fe876b3ff5db/kube-rbac-proxy/0.log" Dec 05 16:00:57 crc kubenswrapper[4840]: I1205 16:00:57.073659 4840 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-rldh8_1c6815a9-6a06-4ed1-81fb-fe876b3ff5db/nmstate-metrics/0.log" Dec 05 16:00:57 crc kubenswrapper[4840]: I1205 16:00:57.184459 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-8zwbs_26f97dff-d2b1-4d3d-b68a-2a8851ea6999/nmstate-operator/0.log" Dec 05 16:00:57 crc kubenswrapper[4840]: I1205 16:00:57.288357 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-2htrv_8ed0ecac-e512-440f-87ad-14e23ea9945f/nmstate-webhook/0.log" Dec 05 16:01:00 crc kubenswrapper[4840]: I1205 16:01:00.155295 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29415841-6x4dr"] Dec 05 16:01:00 crc kubenswrapper[4840]: E1205 16:01:00.156211 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf" containerName="collect-profiles" Dec 05 16:01:00 crc kubenswrapper[4840]: I1205 16:01:00.156242 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf" containerName="collect-profiles" Dec 05 16:01:00 crc kubenswrapper[4840]: I1205 16:01:00.156559 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fc7ac20-5d1f-4627-bbc7-c6d1f7ab35bf" containerName="collect-profiles" Dec 05 16:01:00 crc kubenswrapper[4840]: I1205 16:01:00.157460 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415841-6x4dr" Dec 05 16:01:00 crc kubenswrapper[4840]: I1205 16:01:00.170562 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415841-6x4dr"] Dec 05 16:01:00 crc kubenswrapper[4840]: I1205 16:01:00.361875 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwvw4\" (UniqueName: \"kubernetes.io/projected/8e3d6362-d9b5-44ff-a645-076c0611b8f6-kube-api-access-qwvw4\") pod \"keystone-cron-29415841-6x4dr\" (UID: \"8e3d6362-d9b5-44ff-a645-076c0611b8f6\") " pod="openstack/keystone-cron-29415841-6x4dr" Dec 05 16:01:00 crc kubenswrapper[4840]: I1205 16:01:00.362210 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8e3d6362-d9b5-44ff-a645-076c0611b8f6-fernet-keys\") pod \"keystone-cron-29415841-6x4dr\" (UID: \"8e3d6362-d9b5-44ff-a645-076c0611b8f6\") " pod="openstack/keystone-cron-29415841-6x4dr" Dec 05 16:01:00 crc kubenswrapper[4840]: I1205 16:01:00.362276 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e3d6362-d9b5-44ff-a645-076c0611b8f6-combined-ca-bundle\") pod \"keystone-cron-29415841-6x4dr\" (UID: \"8e3d6362-d9b5-44ff-a645-076c0611b8f6\") " pod="openstack/keystone-cron-29415841-6x4dr" Dec 05 16:01:00 crc kubenswrapper[4840]: I1205 16:01:00.362403 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e3d6362-d9b5-44ff-a645-076c0611b8f6-config-data\") pod \"keystone-cron-29415841-6x4dr\" (UID: \"8e3d6362-d9b5-44ff-a645-076c0611b8f6\") " pod="openstack/keystone-cron-29415841-6x4dr" Dec 05 16:01:00 crc kubenswrapper[4840]: I1205 16:01:00.464333 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwvw4\" (UniqueName: 
\"kubernetes.io/projected/8e3d6362-d9b5-44ff-a645-076c0611b8f6-kube-api-access-qwvw4\") pod \"keystone-cron-29415841-6x4dr\" (UID: \"8e3d6362-d9b5-44ff-a645-076c0611b8f6\") " pod="openstack/keystone-cron-29415841-6x4dr" Dec 05 16:01:00 crc kubenswrapper[4840]: I1205 16:01:00.464408 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8e3d6362-d9b5-44ff-a645-076c0611b8f6-fernet-keys\") pod \"keystone-cron-29415841-6x4dr\" (UID: \"8e3d6362-d9b5-44ff-a645-076c0611b8f6\") " pod="openstack/keystone-cron-29415841-6x4dr" Dec 05 16:01:00 crc kubenswrapper[4840]: I1205 16:01:00.464463 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e3d6362-d9b5-44ff-a645-076c0611b8f6-combined-ca-bundle\") pod \"keystone-cron-29415841-6x4dr\" (UID: \"8e3d6362-d9b5-44ff-a645-076c0611b8f6\") " pod="openstack/keystone-cron-29415841-6x4dr" Dec 05 16:01:00 crc kubenswrapper[4840]: I1205 16:01:00.464513 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e3d6362-d9b5-44ff-a645-076c0611b8f6-config-data\") pod \"keystone-cron-29415841-6x4dr\" (UID: \"8e3d6362-d9b5-44ff-a645-076c0611b8f6\") " pod="openstack/keystone-cron-29415841-6x4dr" Dec 05 16:01:00 crc kubenswrapper[4840]: I1205 16:01:00.470919 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e3d6362-d9b5-44ff-a645-076c0611b8f6-config-data\") pod \"keystone-cron-29415841-6x4dr\" (UID: \"8e3d6362-d9b5-44ff-a645-076c0611b8f6\") " pod="openstack/keystone-cron-29415841-6x4dr" Dec 05 16:01:00 crc kubenswrapper[4840]: I1205 16:01:00.471315 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e3d6362-d9b5-44ff-a645-076c0611b8f6-combined-ca-bundle\") pod \"keystone-cron-29415841-6x4dr\" (UID: \"8e3d6362-d9b5-44ff-a645-076c0611b8f6\") " pod="openstack/keystone-cron-29415841-6x4dr" Dec 05 16:01:00 crc kubenswrapper[4840]: I1205 16:01:00.472893 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8e3d6362-d9b5-44ff-a645-076c0611b8f6-fernet-keys\") pod \"keystone-cron-29415841-6x4dr\" (UID: \"8e3d6362-d9b5-44ff-a645-076c0611b8f6\") " pod="openstack/keystone-cron-29415841-6x4dr" Dec 05 16:01:00 crc kubenswrapper[4840]: I1205 16:01:00.484665 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwvw4\" (UniqueName: \"kubernetes.io/projected/8e3d6362-d9b5-44ff-a645-076c0611b8f6-kube-api-access-qwvw4\") pod \"keystone-cron-29415841-6x4dr\" (UID: \"8e3d6362-d9b5-44ff-a645-076c0611b8f6\") " pod="openstack/keystone-cron-29415841-6x4dr" Dec 05 16:01:00 crc kubenswrapper[4840]: I1205 16:01:00.775162 4840 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29415841-6x4dr" Dec 05 16:01:01 crc kubenswrapper[4840]: I1205 16:01:01.241768 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415841-6x4dr"] Dec 05 16:01:01 crc kubenswrapper[4840]: I1205 16:01:01.268124 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415841-6x4dr" event={"ID":"8e3d6362-d9b5-44ff-a645-076c0611b8f6","Type":"ContainerStarted","Data":"532c15bf834962cadb9208dfed5cfd45a4be16b458211076d31fdb09ccd71918"} Dec 05 16:01:02 crc kubenswrapper[4840]: I1205 16:01:02.074283 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96" Dec 05 16:01:02 crc kubenswrapper[4840]: E1205 16:01:02.074772 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 16:01:02 crc kubenswrapper[4840]: I1205 16:01:02.280125 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415841-6x4dr" event={"ID":"8e3d6362-d9b5-44ff-a645-076c0611b8f6","Type":"ContainerStarted","Data":"10623656a96b0fc98422614e3afe51b95fcfabafe14650c21feccffda4877047"} Dec 05 16:01:02 crc kubenswrapper[4840]: I1205 16:01:02.310691 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29415841-6x4dr" podStartSLOduration=2.310665934 podStartE2EDuration="2.310665934s" podCreationTimestamp="2025-12-05 16:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 16:01:02.299563047 +0000 UTC m=+3740.640625671" watchObservedRunningTime="2025-12-05 16:01:02.310665934 +0000 UTC m=+3740.651728548" Dec 05 16:01:04 crc kubenswrapper[4840]: I1205 16:01:04.338457 4840 generic.go:334] "Generic (PLEG): container finished" podID="8e3d6362-d9b5-44ff-a645-076c0611b8f6" containerID="10623656a96b0fc98422614e3afe51b95fcfabafe14650c21feccffda4877047" exitCode=0 Dec 05 16:01:04 crc kubenswrapper[4840]: I1205 16:01:04.338558 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415841-6x4dr" event={"ID":"8e3d6362-d9b5-44ff-a645-076c0611b8f6","Type":"ContainerDied","Data":"10623656a96b0fc98422614e3afe51b95fcfabafe14650c21feccffda4877047"} Dec 05 16:01:05 crc kubenswrapper[4840]: I1205 16:01:05.712125 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29415841-6x4dr" Dec 05 16:01:05 crc kubenswrapper[4840]: I1205 16:01:05.885327 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qwvw4\" (UniqueName: \"kubernetes.io/projected/8e3d6362-d9b5-44ff-a645-076c0611b8f6-kube-api-access-qwvw4\") pod \"8e3d6362-d9b5-44ff-a645-076c0611b8f6\" (UID: \"8e3d6362-d9b5-44ff-a645-076c0611b8f6\") " Dec 05 16:01:05 crc kubenswrapper[4840]: I1205 16:01:05.885463 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e3d6362-d9b5-44ff-a645-076c0611b8f6-config-data\") pod \"8e3d6362-d9b5-44ff-a645-076c0611b8f6\" (UID: \"8e3d6362-d9b5-44ff-a645-076c0611b8f6\") " Dec 05 16:01:05 crc kubenswrapper[4840]: I1205 16:01:05.885490 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8e3d6362-d9b5-44ff-a645-076c0611b8f6-fernet-keys\") pod \"8e3d6362-d9b5-44ff-a645-076c0611b8f6\" (UID: \"8e3d6362-d9b5-44ff-a645-076c0611b8f6\") " Dec 05 16:01:05 crc kubenswrapper[4840]: I1205 16:01:05.885690 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e3d6362-d9b5-44ff-a645-076c0611b8f6-combined-ca-bundle\") pod \"8e3d6362-d9b5-44ff-a645-076c0611b8f6\" (UID: \"8e3d6362-d9b5-44ff-a645-076c0611b8f6\") " Dec 05 16:01:05 crc kubenswrapper[4840]: I1205 16:01:05.891276 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e3d6362-d9b5-44ff-a645-076c0611b8f6-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "8e3d6362-d9b5-44ff-a645-076c0611b8f6" (UID: "8e3d6362-d9b5-44ff-a645-076c0611b8f6"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 16:01:05 crc kubenswrapper[4840]: I1205 16:01:05.892046 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e3d6362-d9b5-44ff-a645-076c0611b8f6-kube-api-access-qwvw4" (OuterVolumeSpecName: "kube-api-access-qwvw4") pod "8e3d6362-d9b5-44ff-a645-076c0611b8f6" (UID: "8e3d6362-d9b5-44ff-a645-076c0611b8f6"). InnerVolumeSpecName "kube-api-access-qwvw4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 16:01:05 crc kubenswrapper[4840]: I1205 16:01:05.924505 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e3d6362-d9b5-44ff-a645-076c0611b8f6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8e3d6362-d9b5-44ff-a645-076c0611b8f6" (UID: "8e3d6362-d9b5-44ff-a645-076c0611b8f6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 16:01:05 crc kubenswrapper[4840]: I1205 16:01:05.934730 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e3d6362-d9b5-44ff-a645-076c0611b8f6-config-data" (OuterVolumeSpecName: "config-data") pod "8e3d6362-d9b5-44ff-a645-076c0611b8f6" (UID: "8e3d6362-d9b5-44ff-a645-076c0611b8f6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 16:01:05 crc kubenswrapper[4840]: I1205 16:01:05.988354 4840 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e3d6362-d9b5-44ff-a645-076c0611b8f6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 16:01:05 crc kubenswrapper[4840]: I1205 16:01:05.988405 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qwvw4\" (UniqueName: \"kubernetes.io/projected/8e3d6362-d9b5-44ff-a645-076c0611b8f6-kube-api-access-qwvw4\") on node \"crc\" DevicePath \"\"" Dec 05 16:01:05 crc kubenswrapper[4840]: I1205 16:01:05.988422 4840 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8e3d6362-d9b5-44ff-a645-076c0611b8f6-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 16:01:05 crc kubenswrapper[4840]: I1205 16:01:05.988433 4840 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e3d6362-d9b5-44ff-a645-076c0611b8f6-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 16:01:06 crc kubenswrapper[4840]: I1205 16:01:06.358795 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415841-6x4dr" event={"ID":"8e3d6362-d9b5-44ff-a645-076c0611b8f6","Type":"ContainerDied","Data":"532c15bf834962cadb9208dfed5cfd45a4be16b458211076d31fdb09ccd71918"} Dec 05 16:01:06 crc kubenswrapper[4840]: I1205 16:01:06.359102 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="532c15bf834962cadb9208dfed5cfd45a4be16b458211076d31fdb09ccd71918" Dec 05 16:01:06 crc kubenswrapper[4840]: I1205 16:01:06.358853 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415841-6x4dr" Dec 05 16:01:12 crc kubenswrapper[4840]: I1205 16:01:12.501738 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-bbxvm_233d6836-c070-4d58-8f3d-6145a065240d/kube-rbac-proxy/0.log" Dec 05 16:01:12 crc kubenswrapper[4840]: I1205 16:01:12.664913 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-bbxvm_233d6836-c070-4d58-8f3d-6145a065240d/controller/0.log" Dec 05 16:01:12 crc kubenswrapper[4840]: I1205 16:01:12.732465 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/cp-frr-files/0.log" Dec 05 16:01:12 crc kubenswrapper[4840]: I1205 16:01:12.883924 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/cp-frr-files/0.log" Dec 05 16:01:12 crc kubenswrapper[4840]: I1205 16:01:12.938438 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/cp-reloader/0.log" Dec 05 16:01:12 crc kubenswrapper[4840]: I1205 16:01:12.976078 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/cp-metrics/0.log" Dec 05 16:01:13 crc kubenswrapper[4840]: I1205 16:01:13.001369 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/cp-reloader/0.log" Dec 05 16:01:13 crc kubenswrapper[4840]: I1205 16:01:13.300419 4840 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/cp-frr-files/0.log" Dec 05 16:01:13 crc kubenswrapper[4840]: I1205 16:01:13.325701 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/cp-metrics/0.log" Dec 05 16:01:13 crc kubenswrapper[4840]: I1205 16:01:13.392741 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/cp-reloader/0.log" Dec 05 16:01:13 crc kubenswrapper[4840]: I1205 16:01:13.400956 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/cp-metrics/0.log" Dec 05 16:01:13 crc kubenswrapper[4840]: I1205 16:01:13.583352 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/cp-metrics/0.log" Dec 05 16:01:13 crc kubenswrapper[4840]: I1205 16:01:13.598732 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/cp-frr-files/0.log" Dec 05 16:01:13 crc kubenswrapper[4840]: I1205 16:01:13.602644 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/controller/0.log" Dec 05 16:01:13 crc kubenswrapper[4840]: I1205 16:01:13.611687 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/cp-reloader/0.log" Dec 05 16:01:13 crc kubenswrapper[4840]: I1205 16:01:13.881901 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/frr-metrics/0.log" Dec 05 16:01:13 crc kubenswrapper[4840]: I1205 16:01:13.913584 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/kube-rbac-proxy/0.log" Dec 05 16:01:13 crc kubenswrapper[4840]: I1205 16:01:13.927124 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/kube-rbac-proxy-frr/0.log" Dec 05 16:01:14 crc kubenswrapper[4840]: I1205 16:01:14.139704 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/reloader/0.log" Dec 05 16:01:14 crc kubenswrapper[4840]: I1205 16:01:14.162844 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-xhqsd_146837b6-fb7d-4368-9ae3-bc4106ff72de/frr-k8s-webhook-server/0.log" Dec 05 16:01:14 crc kubenswrapper[4840]: I1205 16:01:14.583074 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-6c5f98fb9d-c7t9b_5debe082-c97c-4bb6-8eb6-475c0b97e485/manager/0.log" Dec 05 16:01:14 crc kubenswrapper[4840]: I1205 16:01:14.679707 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-f894b867-vrfmt_b51981bb-b0ed-4c6a-b82d-ae3155eefcb5/webhook-server/0.log" Dec 05 16:01:14 crc kubenswrapper[4840]: I1205 16:01:14.830695 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-8v7db_475d466e-01af-4a8b-85c9-585a152bc376/kube-rbac-proxy/0.log" Dec 05 16:01:15 crc kubenswrapper[4840]: I1205 16:01:15.339469 4840 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_speaker-8v7db_475d466e-01af-4a8b-85c9-585a152bc376/speaker/0.log" Dec 05 16:01:15 crc kubenswrapper[4840]: I1205 16:01:15.384707 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/frr/0.log" Dec 05 16:01:16 crc kubenswrapper[4840]: I1205 16:01:16.066700 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96" Dec 05 16:01:16 crc kubenswrapper[4840]: E1205 16:01:16.067246 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 16:01:27 crc kubenswrapper[4840]: I1205 16:01:27.066293 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96" Dec 05 16:01:27 crc kubenswrapper[4840]: E1205 16:01:27.067076 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 16:01:28 crc kubenswrapper[4840]: I1205 16:01:28.011001 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd_131b3e58-67f5-4c2a-8d70-ff674420a7a5/util/0.log" Dec 05 16:01:28 crc kubenswrapper[4840]: I1205 16:01:28.208546 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd_131b3e58-67f5-4c2a-8d70-ff674420a7a5/util/0.log" Dec 05 16:01:28 crc kubenswrapper[4840]: I1205 16:01:28.230582 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd_131b3e58-67f5-4c2a-8d70-ff674420a7a5/pull/0.log" Dec 05 16:01:28 crc kubenswrapper[4840]: I1205 16:01:28.273802 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd_131b3e58-67f5-4c2a-8d70-ff674420a7a5/pull/0.log" Dec 05 16:01:28 crc kubenswrapper[4840]: I1205 16:01:28.422037 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd_131b3e58-67f5-4c2a-8d70-ff674420a7a5/pull/0.log" Dec 05 16:01:28 crc kubenswrapper[4840]: I1205 16:01:28.463787 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd_131b3e58-67f5-4c2a-8d70-ff674420a7a5/extract/0.log" Dec 05 16:01:28 crc kubenswrapper[4840]: I1205 16:01:28.469168 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd_131b3e58-67f5-4c2a-8d70-ff674420a7a5/util/0.log" Dec 05 16:01:28 crc kubenswrapper[4840]: I1205 16:01:28.575042 4840 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt_276a5275-a612-4fa5-9aac-c252dc7cad0a/util/0.log" Dec 05 16:01:28 crc kubenswrapper[4840]: I1205 16:01:28.802258 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt_276a5275-a612-4fa5-9aac-c252dc7cad0a/util/0.log" Dec 05 16:01:28 crc kubenswrapper[4840]: I1205 16:01:28.811152 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt_276a5275-a612-4fa5-9aac-c252dc7cad0a/pull/0.log" Dec 05 16:01:28 crc kubenswrapper[4840]: I1205 16:01:28.833290 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt_276a5275-a612-4fa5-9aac-c252dc7cad0a/pull/0.log" Dec 05 16:01:29 crc kubenswrapper[4840]: I1205 16:01:29.015638 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt_276a5275-a612-4fa5-9aac-c252dc7cad0a/util/0.log" Dec 05 16:01:29 crc kubenswrapper[4840]: I1205 16:01:29.016499 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt_276a5275-a612-4fa5-9aac-c252dc7cad0a/pull/0.log" Dec 05 16:01:29 crc kubenswrapper[4840]: I1205 16:01:29.033646 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt_276a5275-a612-4fa5-9aac-c252dc7cad0a/extract/0.log" Dec 05 16:01:29 crc kubenswrapper[4840]: I1205 16:01:29.174901 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7mwf7_d8153f52-1255-4593-8136-083c0a618d49/extract-utilities/0.log" Dec 05 16:01:29 crc kubenswrapper[4840]: I1205 16:01:29.397020 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7mwf7_d8153f52-1255-4593-8136-083c0a618d49/extract-content/0.log" Dec 05 16:01:29 crc kubenswrapper[4840]: I1205 16:01:29.400353 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7mwf7_d8153f52-1255-4593-8136-083c0a618d49/extract-utilities/0.log" Dec 05 16:01:29 crc kubenswrapper[4840]: I1205 16:01:29.417749 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7mwf7_d8153f52-1255-4593-8136-083c0a618d49/extract-content/0.log" Dec 05 16:01:29 crc kubenswrapper[4840]: I1205 16:01:29.617498 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7mwf7_d8153f52-1255-4593-8136-083c0a618d49/extract-utilities/0.log" Dec 05 16:01:29 crc kubenswrapper[4840]: I1205 16:01:29.626150 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7mwf7_d8153f52-1255-4593-8136-083c0a618d49/extract-content/0.log" Dec 05 16:01:29 crc kubenswrapper[4840]: I1205 16:01:29.872803 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nbj9s_916556b2-0a55-4c5a-8d5a-3e42bdeaec95/extract-utilities/0.log" Dec 05 16:01:30 crc kubenswrapper[4840]: I1205 16:01:30.110808 4840 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-nbj9s_916556b2-0a55-4c5a-8d5a-3e42bdeaec95/extract-utilities/0.log" Dec 05 16:01:30 crc kubenswrapper[4840]: I1205 16:01:30.141971 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nbj9s_916556b2-0a55-4c5a-8d5a-3e42bdeaec95/extract-content/0.log" Dec 05 16:01:30 crc kubenswrapper[4840]: I1205 16:01:30.156796 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7mwf7_d8153f52-1255-4593-8136-083c0a618d49/registry-server/0.log" Dec 05 16:01:30 crc kubenswrapper[4840]: I1205 16:01:30.163038 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nbj9s_916556b2-0a55-4c5a-8d5a-3e42bdeaec95/extract-content/0.log" Dec 05 16:01:30 crc kubenswrapper[4840]: I1205 16:01:30.336955 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nbj9s_916556b2-0a55-4c5a-8d5a-3e42bdeaec95/extract-utilities/0.log" Dec 05 16:01:30 crc kubenswrapper[4840]: I1205 16:01:30.360369 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nbj9s_916556b2-0a55-4c5a-8d5a-3e42bdeaec95/extract-content/0.log" Dec 05 16:01:30 crc kubenswrapper[4840]: I1205 16:01:30.584709 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nbj9s_916556b2-0a55-4c5a-8d5a-3e42bdeaec95/registry-server/0.log" Dec 05 16:01:30 crc kubenswrapper[4840]: I1205 16:01:30.601486 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-mv2cn_1581af24-4229-4cac-a548-20cafe277dff/marketplace-operator/0.log" Dec 05 16:01:30 crc kubenswrapper[4840]: I1205 16:01:30.669462 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-scqgw_6da7f018-2512-4ff2-8824-f90c5a0d5abf/extract-utilities/0.log" Dec 05 16:01:30 crc kubenswrapper[4840]: I1205 16:01:30.939325 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-scqgw_6da7f018-2512-4ff2-8824-f90c5a0d5abf/extract-content/0.log" Dec 05 16:01:30 crc kubenswrapper[4840]: I1205 16:01:30.969527 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-scqgw_6da7f018-2512-4ff2-8824-f90c5a0d5abf/extract-utilities/0.log" Dec 05 16:01:30 crc kubenswrapper[4840]: I1205 16:01:30.977145 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-scqgw_6da7f018-2512-4ff2-8824-f90c5a0d5abf/extract-content/0.log" Dec 05 16:01:31 crc kubenswrapper[4840]: I1205 16:01:31.155263 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-scqgw_6da7f018-2512-4ff2-8824-f90c5a0d5abf/extract-utilities/0.log" Dec 05 16:01:31 crc kubenswrapper[4840]: I1205 16:01:31.164464 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-scqgw_6da7f018-2512-4ff2-8824-f90c5a0d5abf/extract-content/0.log" Dec 05 16:01:31 crc kubenswrapper[4840]: I1205 16:01:31.336193 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-scqgw_6da7f018-2512-4ff2-8824-f90c5a0d5abf/registry-server/0.log" Dec 05 16:01:31 crc kubenswrapper[4840]: I1205 16:01:31.395944 4840 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-25nwq_7ef68ef3-0dac-4194-9d02-c0156a923d76/extract-utilities/0.log" Dec 05 16:01:31 crc kubenswrapper[4840]: I1205 16:01:31.562889 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-25nwq_7ef68ef3-0dac-4194-9d02-c0156a923d76/extract-utilities/0.log" Dec 05 16:01:31 crc kubenswrapper[4840]: I1205 16:01:31.608576 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-25nwq_7ef68ef3-0dac-4194-9d02-c0156a923d76/extract-content/0.log" Dec 05 16:01:31 crc kubenswrapper[4840]: I1205 16:01:31.617316 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-25nwq_7ef68ef3-0dac-4194-9d02-c0156a923d76/extract-content/0.log" Dec 05 16:01:31 crc kubenswrapper[4840]: I1205 16:01:31.735707 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-25nwq_7ef68ef3-0dac-4194-9d02-c0156a923d76/extract-utilities/0.log" Dec 05 16:01:31 crc kubenswrapper[4840]: I1205 16:01:31.805823 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-25nwq_7ef68ef3-0dac-4194-9d02-c0156a923d76/extract-content/0.log" Dec 05 16:01:32 crc kubenswrapper[4840]: I1205 16:01:32.463493 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-25nwq_7ef68ef3-0dac-4194-9d02-c0156a923d76/registry-server/0.log" Dec 05 16:01:39 crc kubenswrapper[4840]: I1205 16:01:39.066649 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96" Dec 05 16:01:39 crc kubenswrapper[4840]: E1205 16:01:39.067458 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 16:01:50 crc kubenswrapper[4840]: I1205 16:01:50.070522 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96" Dec 05 16:01:50 crc kubenswrapper[4840]: E1205 16:01:50.071490 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 16:02:04 crc kubenswrapper[4840]: I1205 16:02:04.073130 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96" Dec 05 16:02:04 crc kubenswrapper[4840]: E1205 16:02:04.073929 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 16:02:06 
crc kubenswrapper[4840]: E1205 16:02:06.006835 4840 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.195:35064->38.102.83.195:36337: write tcp 38.102.83.195:35064->38.102.83.195:36337: write: broken pipe Dec 05 16:02:16 crc kubenswrapper[4840]: I1205 16:02:16.069645 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96" Dec 05 16:02:16 crc kubenswrapper[4840]: E1205 16:02:16.070402 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 16:02:31 crc kubenswrapper[4840]: I1205 16:02:31.066513 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96" Dec 05 16:02:31 crc kubenswrapper[4840]: E1205 16:02:31.067301 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 16:02:44 crc kubenswrapper[4840]: I1205 16:02:44.067165 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96" Dec 05 16:02:44 crc kubenswrapper[4840]: E1205 16:02:44.068071 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 16:02:58 crc kubenswrapper[4840]: I1205 16:02:58.067478 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96" Dec 05 16:02:58 crc kubenswrapper[4840]: E1205 16:02:58.069116 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 16:03:10 crc kubenswrapper[4840]: I1205 16:03:10.067959 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96" Dec 05 16:03:10 crc kubenswrapper[4840]: E1205 16:03:10.069230 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 16:03:14 crc kubenswrapper[4840]: I1205 16:03:14.179386 4840 generic.go:334] "Generic (PLEG): container finished" podID="60370ced-19ef-46f7-bbee-9766d1bcda64" containerID="3a9e278faaf7e5b15f4c9ce5d4459d193157ec69236072b270bc278ca0615954" exitCode=0 Dec 05 16:03:14 crc kubenswrapper[4840]: I1205 16:03:14.179986 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mv5m4/must-gather-8gqpk" event={"ID":"60370ced-19ef-46f7-bbee-9766d1bcda64","Type":"ContainerDied","Data":"3a9e278faaf7e5b15f4c9ce5d4459d193157ec69236072b270bc278ca0615954"} Dec 05 16:03:14 crc kubenswrapper[4840]: I1205 16:03:14.180433 4840 scope.go:117] "RemoveContainer" containerID="3a9e278faaf7e5b15f4c9ce5d4459d193157ec69236072b270bc278ca0615954" Dec 05 16:03:15 crc kubenswrapper[4840]: I1205 16:03:15.198541 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-mv5m4_must-gather-8gqpk_60370ced-19ef-46f7-bbee-9766d1bcda64/gather/0.log" Dec 05 16:03:23 crc kubenswrapper[4840]: I1205 16:03:23.045021 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-mv5m4/must-gather-8gqpk"] Dec 05 16:03:23 crc kubenswrapper[4840]: I1205 16:03:23.045781 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-mv5m4/must-gather-8gqpk" podUID="60370ced-19ef-46f7-bbee-9766d1bcda64" containerName="copy" containerID="cri-o://51335d43b71109b7b3096628d1776223d549b23e1c66f3d211e9debb0223bf3f" gracePeriod=2 Dec 05 16:03:23 crc kubenswrapper[4840]: I1205 16:03:23.053985 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-mv5m4/must-gather-8gqpk"] Dec 05 16:03:23 crc kubenswrapper[4840]: I1205 16:03:23.067131 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96" Dec 05 16:03:23 crc kubenswrapper[4840]: E1205 16:03:23.067750 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 16:03:23 crc kubenswrapper[4840]: I1205 16:03:23.279113 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-mv5m4_must-gather-8gqpk_60370ced-19ef-46f7-bbee-9766d1bcda64/copy/0.log" Dec 05 16:03:23 crc kubenswrapper[4840]: I1205 16:03:23.279720 4840 generic.go:334] "Generic (PLEG): container finished" podID="60370ced-19ef-46f7-bbee-9766d1bcda64" containerID="51335d43b71109b7b3096628d1776223d549b23e1c66f3d211e9debb0223bf3f" exitCode=143 Dec 05 16:03:23 crc kubenswrapper[4840]: I1205 16:03:23.454588 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-mv5m4_must-gather-8gqpk_60370ced-19ef-46f7-bbee-9766d1bcda64/copy/0.log" Dec 05 16:03:23 crc kubenswrapper[4840]: I1205 16:03:23.454937 4840 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 16:03:23 crc kubenswrapper[4840]: I1205 16:03:23.454937 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mv5m4/must-gather-8gqpk"
Dec 05 16:03:23 crc kubenswrapper[4840]: I1205 16:03:23.584773 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/60370ced-19ef-46f7-bbee-9766d1bcda64-must-gather-output\") pod \"60370ced-19ef-46f7-bbee-9766d1bcda64\" (UID: \"60370ced-19ef-46f7-bbee-9766d1bcda64\") "
Dec 05 16:03:23 crc kubenswrapper[4840]: I1205 16:03:23.584958 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9bp8v\" (UniqueName: \"kubernetes.io/projected/60370ced-19ef-46f7-bbee-9766d1bcda64-kube-api-access-9bp8v\") pod \"60370ced-19ef-46f7-bbee-9766d1bcda64\" (UID: \"60370ced-19ef-46f7-bbee-9766d1bcda64\") "
Dec 05 16:03:23 crc kubenswrapper[4840]: I1205 16:03:23.598408 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60370ced-19ef-46f7-bbee-9766d1bcda64-kube-api-access-9bp8v" (OuterVolumeSpecName: "kube-api-access-9bp8v") pod "60370ced-19ef-46f7-bbee-9766d1bcda64" (UID: "60370ced-19ef-46f7-bbee-9766d1bcda64"). InnerVolumeSpecName "kube-api-access-9bp8v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 16:03:23 crc kubenswrapper[4840]: I1205 16:03:23.686627 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9bp8v\" (UniqueName: \"kubernetes.io/projected/60370ced-19ef-46f7-bbee-9766d1bcda64-kube-api-access-9bp8v\") on node \"crc\" DevicePath \"\""
Dec 05 16:03:23 crc kubenswrapper[4840]: I1205 16:03:23.740765 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60370ced-19ef-46f7-bbee-9766d1bcda64-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "60370ced-19ef-46f7-bbee-9766d1bcda64" (UID: "60370ced-19ef-46f7-bbee-9766d1bcda64"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 16:03:23 crc kubenswrapper[4840]: I1205 16:03:23.791218 4840 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/60370ced-19ef-46f7-bbee-9766d1bcda64-must-gather-output\") on node \"crc\" DevicePath \"\""
Dec 05 16:03:24 crc kubenswrapper[4840]: I1205 16:03:24.077457 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60370ced-19ef-46f7-bbee-9766d1bcda64" path="/var/lib/kubelet/pods/60370ced-19ef-46f7-bbee-9766d1bcda64/volumes"
Dec 05 16:03:24 crc kubenswrapper[4840]: I1205 16:03:24.298683 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-mv5m4_must-gather-8gqpk_60370ced-19ef-46f7-bbee-9766d1bcda64/copy/0.log"
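The must-gather teardown above follows the reconciler's fixed per-volume order: "UnmountVolume started", then "UnmountVolume.TearDown succeeded", then "Volume detached", with the orphaned pod volumes dir cleaned only after every volume is gone. A hedged Go sketch of that ordering (types and function names are illustrative, not kubelet API):

    package main

    import "fmt"

    type volume struct{ name string }

    // unmountAll walks each volume through the three teardown steps seen
    // in the log, then removes the pod's now-orphaned volumes directory.
    func unmountAll(pod string, vols []volume) {
    	for _, v := range vols {
    		fmt.Printf("UnmountVolume started for volume %q pod %q\n", v.name, pod)
    		fmt.Printf("UnmountVolume.TearDown succeeded for volume %q\n", v.name)
    		fmt.Printf("Volume detached for volume %q on node \"crc\"\n", v.name)
    	}
    	fmt.Printf("Cleaned up orphaned pod volumes dir for pod %q\n", pod)
    }

    func main() {
    	unmountAll("must-gather-8gqpk",
    		[]volume{{"kube-api-access-9bp8v"}, {"must-gather-output"}})
    }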
Dec 05 16:03:24 crc kubenswrapper[4840]: I1205 16:03:24.299110 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mv5m4/must-gather-8gqpk"
Dec 05 16:03:24 crc kubenswrapper[4840]: I1205 16:03:24.299107 4840 scope.go:117] "RemoveContainer" containerID="51335d43b71109b7b3096628d1776223d549b23e1c66f3d211e9debb0223bf3f"
Dec 05 16:03:24 crc kubenswrapper[4840]: I1205 16:03:24.322747 4840 scope.go:117] "RemoveContainer" containerID="3a9e278faaf7e5b15f4c9ce5d4459d193157ec69236072b270bc278ca0615954"
Dec 05 16:03:35 crc kubenswrapper[4840]: I1205 16:03:35.067431 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96"
Dec 05 16:03:35 crc kubenswrapper[4840]: E1205 16:03:35.068393 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 16:03:49 crc kubenswrapper[4840]: I1205 16:03:49.066362 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96"
Dec 05 16:03:49 crc kubenswrapper[4840]: E1205 16:03:49.067243 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 16:04:01 crc kubenswrapper[4840]: I1205 16:04:01.066708 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96"
Dec 05 16:04:01 crc kubenswrapper[4840]: E1205 16:04:01.067654 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 16:04:14 crc kubenswrapper[4840]: I1205 16:04:14.067267 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96"
Dec 05 16:04:14 crc kubenswrapper[4840]: E1205 16:04:14.068001 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 16:04:25 crc kubenswrapper[4840]: I1205 16:04:25.338745 4840 scope.go:117] "RemoveContainer" containerID="7e2255b73bb8114a1985516eda145a94f1edcc23f51fa9b3d59b662338428a2e"
Dec 05 16:04:26 crc kubenswrapper[4840]: I1205 16:04:26.067718 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96"
Dec 05 16:04:27 crc kubenswrapper[4840]: I1205 16:04:27.187003 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerStarted","Data":"24301b3a21dcacee3bf216aee4db41bf2d104b03938ab88fe2b2936fc478b964"}
Dec 05 16:05:32 crc kubenswrapper[4840]: I1205 16:05:32.256625 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zjgkd"]
Dec 05 16:05:32 crc kubenswrapper[4840]: E1205 16:05:32.257614 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60370ced-19ef-46f7-bbee-9766d1bcda64" containerName="gather"
Dec 05 16:05:32 crc kubenswrapper[4840]: I1205 16:05:32.257635 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="60370ced-19ef-46f7-bbee-9766d1bcda64" containerName="gather"
Dec 05 16:05:32 crc kubenswrapper[4840]: E1205 16:05:32.257647 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60370ced-19ef-46f7-bbee-9766d1bcda64" containerName="copy"
Dec 05 16:05:32 crc kubenswrapper[4840]: I1205 16:05:32.257653 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="60370ced-19ef-46f7-bbee-9766d1bcda64" containerName="copy"
Dec 05 16:05:32 crc kubenswrapper[4840]: E1205 16:05:32.257671 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e3d6362-d9b5-44ff-a645-076c0611b8f6" containerName="keystone-cron"
Dec 05 16:05:32 crc kubenswrapper[4840]: I1205 16:05:32.257677 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e3d6362-d9b5-44ff-a645-076c0611b8f6" containerName="keystone-cron"
Dec 05 16:05:32 crc kubenswrapper[4840]: I1205 16:05:32.257936 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e3d6362-d9b5-44ff-a645-076c0611b8f6" containerName="keystone-cron"
Dec 05 16:05:32 crc kubenswrapper[4840]: I1205 16:05:32.257954 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="60370ced-19ef-46f7-bbee-9766d1bcda64" containerName="copy"
Dec 05 16:05:32 crc kubenswrapper[4840]: I1205 16:05:32.257970 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="60370ced-19ef-46f7-bbee-9766d1bcda64" containerName="gather"
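The RemoveStaleState / "Deleted CPUSet assignment" pairs above show the cpu and memory managers dropping per-container resource state for pods that no longer exist, keyed by podUID plus containerName. A minimal Go sketch of that cleanup under those assumptions (the map layout is illustrative, not the kubelet's cpu_manager API):

    package main

    import "fmt"

    type key struct{ podUID, container string }

    // removeStaleState deletes resource assignments whose owning pod is
    // no longer live, mirroring the log's per-(podUID, containerName) keying.
    func removeStaleState(assignments map[key]string, live map[string]bool) {
    	for k := range assignments {
    		if !live[k.podUID] {
    			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n",
    				k.podUID, k.container)
    			delete(assignments, k) // deleting while ranging is safe in Go
    		}
    	}
    }

    func main() {
    	a := map[key]string{
    		{"60370ced-19ef-46f7-bbee-9766d1bcda64", "gather"}: "cpuset 0-3",
    		{"60370ced-19ef-46f7-bbee-9766d1bcda64", "copy"}:   "cpuset 0-3",
    	}
    	removeStaleState(a, map[string]bool{}) // no live pods: both entries go
    	fmt.Println("remaining assignments:", len(a))
    }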
Dec 05 16:05:32 crc kubenswrapper[4840]: I1205 16:05:32.259443 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zjgkd"
Dec 05 16:05:32 crc kubenswrapper[4840]: I1205 16:05:32.284236 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zjgkd"]
Dec 05 16:05:32 crc kubenswrapper[4840]: I1205 16:05:32.384786 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/154e7975-e7f2-42f7-9d3c-5415c24b12ae-catalog-content\") pod \"redhat-marketplace-zjgkd\" (UID: \"154e7975-e7f2-42f7-9d3c-5415c24b12ae\") " pod="openshift-marketplace/redhat-marketplace-zjgkd"
Dec 05 16:05:32 crc kubenswrapper[4840]: I1205 16:05:32.385026 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwnrx\" (UniqueName: \"kubernetes.io/projected/154e7975-e7f2-42f7-9d3c-5415c24b12ae-kube-api-access-nwnrx\") pod \"redhat-marketplace-zjgkd\" (UID: \"154e7975-e7f2-42f7-9d3c-5415c24b12ae\") " pod="openshift-marketplace/redhat-marketplace-zjgkd"
Dec 05 16:05:32 crc kubenswrapper[4840]: I1205 16:05:32.385090 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/154e7975-e7f2-42f7-9d3c-5415c24b12ae-utilities\") pod \"redhat-marketplace-zjgkd\" (UID: \"154e7975-e7f2-42f7-9d3c-5415c24b12ae\") " pod="openshift-marketplace/redhat-marketplace-zjgkd"
Dec 05 16:05:32 crc kubenswrapper[4840]: I1205 16:05:32.486789 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwnrx\" (UniqueName: \"kubernetes.io/projected/154e7975-e7f2-42f7-9d3c-5415c24b12ae-kube-api-access-nwnrx\") pod \"redhat-marketplace-zjgkd\" (UID: \"154e7975-e7f2-42f7-9d3c-5415c24b12ae\") " pod="openshift-marketplace/redhat-marketplace-zjgkd"
Dec 05 16:05:32 crc kubenswrapper[4840]: I1205 16:05:32.486861 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/154e7975-e7f2-42f7-9d3c-5415c24b12ae-utilities\") pod \"redhat-marketplace-zjgkd\" (UID: \"154e7975-e7f2-42f7-9d3c-5415c24b12ae\") " pod="openshift-marketplace/redhat-marketplace-zjgkd"
Dec 05 16:05:32 crc kubenswrapper[4840]: I1205 16:05:32.486925 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/154e7975-e7f2-42f7-9d3c-5415c24b12ae-catalog-content\") pod \"redhat-marketplace-zjgkd\" (UID: \"154e7975-e7f2-42f7-9d3c-5415c24b12ae\") " pod="openshift-marketplace/redhat-marketplace-zjgkd"
Dec 05 16:05:32 crc kubenswrapper[4840]: I1205 16:05:32.487550 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/154e7975-e7f2-42f7-9d3c-5415c24b12ae-utilities\") pod \"redhat-marketplace-zjgkd\" (UID: \"154e7975-e7f2-42f7-9d3c-5415c24b12ae\") " pod="openshift-marketplace/redhat-marketplace-zjgkd"
Dec 05 16:05:32 crc kubenswrapper[4840]: I1205 16:05:32.487554 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/154e7975-e7f2-42f7-9d3c-5415c24b12ae-catalog-content\") pod \"redhat-marketplace-zjgkd\" (UID: \"154e7975-e7f2-42f7-9d3c-5415c24b12ae\") " pod="openshift-marketplace/redhat-marketplace-zjgkd"
Dec 05 16:05:32 crc kubenswrapper[4840]: I1205 16:05:32.516134 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwnrx\" (UniqueName: \"kubernetes.io/projected/154e7975-e7f2-42f7-9d3c-5415c24b12ae-kube-api-access-nwnrx\") pod \"redhat-marketplace-zjgkd\" (UID: \"154e7975-e7f2-42f7-9d3c-5415c24b12ae\") " pod="openshift-marketplace/redhat-marketplace-zjgkd"
Dec 05 16:05:32 crc kubenswrapper[4840]: I1205 16:05:32.583302 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zjgkd"
Dec 05 16:05:33 crc kubenswrapper[4840]: I1205 16:05:33.081767 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zjgkd"]
Dec 05 16:05:33 crc kubenswrapper[4840]: I1205 16:05:33.244578 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6ddm9"]
Dec 05 16:05:33 crc kubenswrapper[4840]: I1205 16:05:33.247615 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6ddm9"
Dec 05 16:05:33 crc kubenswrapper[4840]: I1205 16:05:33.263209 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6ddm9"]
Dec 05 16:05:33 crc kubenswrapper[4840]: I1205 16:05:33.447694 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1281477-d43b-46e6-8604-96da41d2c380-catalog-content\") pod \"redhat-operators-6ddm9\" (UID: \"c1281477-d43b-46e6-8604-96da41d2c380\") " pod="openshift-marketplace/redhat-operators-6ddm9"
Dec 05 16:05:33 crc kubenswrapper[4840]: I1205 16:05:33.447981 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1281477-d43b-46e6-8604-96da41d2c380-utilities\") pod \"redhat-operators-6ddm9\" (UID: \"c1281477-d43b-46e6-8604-96da41d2c380\") " pod="openshift-marketplace/redhat-operators-6ddm9"
Dec 05 16:05:33 crc kubenswrapper[4840]: I1205 16:05:33.448236 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7qb6\" (UniqueName: \"kubernetes.io/projected/c1281477-d43b-46e6-8604-96da41d2c380-kube-api-access-t7qb6\") pod \"redhat-operators-6ddm9\" (UID: \"c1281477-d43b-46e6-8604-96da41d2c380\") " pod="openshift-marketplace/redhat-operators-6ddm9"
Dec 05 16:05:33 crc kubenswrapper[4840]: I1205 16:05:33.550015 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1281477-d43b-46e6-8604-96da41d2c380-utilities\") pod \"redhat-operators-6ddm9\" (UID: \"c1281477-d43b-46e6-8604-96da41d2c380\") " pod="openshift-marketplace/redhat-operators-6ddm9"
Dec 05 16:05:33 crc kubenswrapper[4840]: I1205 16:05:33.550157 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7qb6\" (UniqueName: \"kubernetes.io/projected/c1281477-d43b-46e6-8604-96da41d2c380-kube-api-access-t7qb6\") pod \"redhat-operators-6ddm9\" (UID: \"c1281477-d43b-46e6-8604-96da41d2c380\") " pod="openshift-marketplace/redhat-operators-6ddm9"
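Volume setup above is two-phase per pod: "VerifyControllerAttachedVolume started" for every volume first, then "MountVolume started" followed by "MountVolume.SetUp succeeded"; the projected kube-api-access-* volume tends to report last, plausibly because token projection needs an API round trip (an inference from the timestamps, not a documented guarantee). A sketch of the ordering (illustrative names, not kubelet API):

    package main

    import "fmt"

    // setupVolumes replays the two-phase order visible in the log:
    // verify attachment for all volumes, then mount each one.
    func setupVolumes(pod string, vols []string) {
    	for _, v := range vols {
    		fmt.Printf("VerifyControllerAttachedVolume started for volume %q pod %q\n", v, pod)
    	}
    	for _, v := range vols {
    		fmt.Printf("MountVolume started for volume %q pod %q\n", v, pod)
    		fmt.Printf("MountVolume.SetUp succeeded for volume %q\n", v)
    	}
    }

    func main() {
    	setupVolumes("redhat-operators-6ddm9",
    		[]string{"catalog-content", "utilities", "kube-api-access-t7qb6"})
    }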
Dec 05 16:05:33 crc kubenswrapper[4840]: I1205 16:05:33.550255 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1281477-d43b-46e6-8604-96da41d2c380-catalog-content\") pod \"redhat-operators-6ddm9\" (UID: \"c1281477-d43b-46e6-8604-96da41d2c380\") " pod="openshift-marketplace/redhat-operators-6ddm9"
Dec 05 16:05:33 crc kubenswrapper[4840]: I1205 16:05:33.550754 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1281477-d43b-46e6-8604-96da41d2c380-catalog-content\") pod \"redhat-operators-6ddm9\" (UID: \"c1281477-d43b-46e6-8604-96da41d2c380\") " pod="openshift-marketplace/redhat-operators-6ddm9"
Dec 05 16:05:33 crc kubenswrapper[4840]: I1205 16:05:33.550713 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1281477-d43b-46e6-8604-96da41d2c380-utilities\") pod \"redhat-operators-6ddm9\" (UID: \"c1281477-d43b-46e6-8604-96da41d2c380\") " pod="openshift-marketplace/redhat-operators-6ddm9"
Dec 05 16:05:33 crc kubenswrapper[4840]: I1205 16:05:33.579534 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7qb6\" (UniqueName: \"kubernetes.io/projected/c1281477-d43b-46e6-8604-96da41d2c380-kube-api-access-t7qb6\") pod \"redhat-operators-6ddm9\" (UID: \"c1281477-d43b-46e6-8604-96da41d2c380\") " pod="openshift-marketplace/redhat-operators-6ddm9"
Dec 05 16:05:33 crc kubenswrapper[4840]: I1205 16:05:33.581172 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6ddm9"
Dec 05 16:05:33 crc kubenswrapper[4840]: I1205 16:05:33.878826 4840 generic.go:334] "Generic (PLEG): container finished" podID="154e7975-e7f2-42f7-9d3c-5415c24b12ae" containerID="783f1812fcc1a72277a9cee9d9f94b6b2496b0cd4e4bbe2a9633be170e4ca4e1" exitCode=0
Dec 05 16:05:33 crc kubenswrapper[4840]: I1205 16:05:33.879237 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zjgkd" event={"ID":"154e7975-e7f2-42f7-9d3c-5415c24b12ae","Type":"ContainerDied","Data":"783f1812fcc1a72277a9cee9d9f94b6b2496b0cd4e4bbe2a9633be170e4ca4e1"}
Dec 05 16:05:33 crc kubenswrapper[4840]: I1205 16:05:33.879277 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zjgkd" event={"ID":"154e7975-e7f2-42f7-9d3c-5415c24b12ae","Type":"ContainerStarted","Data":"d71f3db9780cffed61bb899dcbf94c0b4b09a8c22ddf1ad0012704effa98b4af"}
Dec 05 16:05:33 crc kubenswrapper[4840]: I1205 16:05:33.883656 4840 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 16:05:34 crc kubenswrapper[4840]: W1205 16:05:34.079648 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc1281477_d43b_46e6_8604_96da41d2c380.slice/crio-6b1b64c302b7464e603e471de5268125a152c594f693832538a90d6c3d87d817 WatchSource:0}: Error finding container 6b1b64c302b7464e603e471de5268125a152c594f693832538a90d6c3d87d817: Status 404 returned error can't find the container with id 6b1b64c302b7464e603e471de5268125a152c594f693832538a90d6c3d87d817
Dec 05 16:05:34 crc kubenswrapper[4840]: I1205 16:05:34.080482 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6ddm9"]
Dec 05 16:05:34 crc kubenswrapper[4840]: I1205 16:05:34.888031 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zjgkd" event={"ID":"154e7975-e7f2-42f7-9d3c-5415c24b12ae","Type":"ContainerStarted","Data":"ec6ac6f1548a1f246e8d430aaff27fce09a7983b5ce5d161fed4e900f4343164"}
Dec 05 16:05:34 crc kubenswrapper[4840]: I1205 16:05:34.977555 4840 generic.go:334] "Generic (PLEG): container finished" podID="c1281477-d43b-46e6-8604-96da41d2c380" containerID="f2c5805e013a3d6cbb09842b540c51ab4550d4533eff1cb077e93b490427fe81" exitCode=0
Dec 05 16:05:34 crc kubenswrapper[4840]: I1205 16:05:34.977603 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6ddm9" event={"ID":"c1281477-d43b-46e6-8604-96da41d2c380","Type":"ContainerDied","Data":"f2c5805e013a3d6cbb09842b540c51ab4550d4533eff1cb077e93b490427fe81"}
Dec 05 16:05:34 crc kubenswrapper[4840]: I1205 16:05:34.977635 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6ddm9" event={"ID":"c1281477-d43b-46e6-8604-96da41d2c380","Type":"ContainerStarted","Data":"6b1b64c302b7464e603e471de5268125a152c594f693832538a90d6c3d87d817"}
Dec 05 16:05:35 crc kubenswrapper[4840]: I1205 16:05:35.992818 4840 generic.go:334] "Generic (PLEG): container finished" podID="154e7975-e7f2-42f7-9d3c-5415c24b12ae" containerID="ec6ac6f1548a1f246e8d430aaff27fce09a7983b5ce5d161fed4e900f4343164" exitCode=0
Dec 05 16:05:35 crc kubenswrapper[4840]: I1205 16:05:35.993191 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zjgkd" event={"ID":"154e7975-e7f2-42f7-9d3c-5415c24b12ae","Type":"ContainerDied","Data":"ec6ac6f1548a1f246e8d430aaff27fce09a7983b5ce5d161fed4e900f4343164"}
Dec 05 16:05:35 crc kubenswrapper[4840]: I1205 16:05:35.998323 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6ddm9" event={"ID":"c1281477-d43b-46e6-8604-96da41d2c380","Type":"ContainerStarted","Data":"e58a1fb94f5357614bc8b769f01b4969ce428c090d63ff6efd505aa53789a84f"}
Dec 05 16:05:37 crc kubenswrapper[4840]: I1205 16:05:37.011460 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zjgkd" event={"ID":"154e7975-e7f2-42f7-9d3c-5415c24b12ae","Type":"ContainerStarted","Data":"b3c38d035d9d90210a2971ab0b28b58bd1a533e193a0ae21378f3297a3ae7f46"}
Dec 05 16:05:37 crc kubenswrapper[4840]: I1205 16:05:37.014849 4840 generic.go:334] "Generic (PLEG): container finished" podID="c1281477-d43b-46e6-8604-96da41d2c380" containerID="e58a1fb94f5357614bc8b769f01b4969ce428c090d63ff6efd505aa53789a84f" exitCode=0
Dec 05 16:05:37 crc kubenswrapper[4840]: I1205 16:05:37.014918 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6ddm9" event={"ID":"c1281477-d43b-46e6-8604-96da41d2c380","Type":"ContainerDied","Data":"e58a1fb94f5357614bc8b769f01b4969ce428c090d63ff6efd505aa53789a84f"}
Dec 05 16:05:37 crc kubenswrapper[4840]: I1205 16:05:37.037194 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zjgkd" podStartSLOduration=2.196649574 podStartE2EDuration="5.037172084s" podCreationTimestamp="2025-12-05 16:05:32 +0000 UTC" firstStartedPulling="2025-12-05 16:05:33.883371308 +0000 UTC m=+4012.224433922" lastFinishedPulling="2025-12-05 16:05:36.723893818 +0000 UTC m=+4015.064956432" observedRunningTime="2025-12-05 16:05:37.032690856 +0000 UTC m=+4015.373753470" watchObservedRunningTime="2025-12-05 16:05:37.037172084 +0000 UTC m=+4015.378234698"
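The two durations in the "Observed pod startup duration" entry above decompose cleanly: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration subtracts the image-pull window (lastFinishedPulling minus firstStartedPulling) from it. A small Go check using the zjgkd timestamps from the log (the decomposition is an inference from these numbers, not documented output):

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	parse := func(s string) time.Time {
    		// layout matches the log's "2025-12-05 16:05:32 +0000 UTC" form;
    		// the fractional-second part is optional with ".999999999".
    		t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
    		if err != nil {
    			panic(err)
    		}
    		return t
    	}
    	created := parse("2025-12-05 16:05:32 +0000 UTC")
    	firstPull := parse("2025-12-05 16:05:33.883371308 +0000 UTC")
    	lastPull := parse("2025-12-05 16:05:36.723893818 +0000 UTC")
    	running := parse("2025-12-05 16:05:37.037172084 +0000 UTC")

    	e2e := running.Sub(created)
    	slo := e2e - lastPull.Sub(firstPull)
    	fmt.Println("podStartE2EDuration:", e2e) // 5.037172084s, as logged
    	fmt.Println("podStartSLOduration:", slo) // 2.196649574s, as logged
    }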
Dec 05 16:05:38 crc kubenswrapper[4840]: I1205 16:05:38.031224 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6ddm9" event={"ID":"c1281477-d43b-46e6-8604-96da41d2c380","Type":"ContainerStarted","Data":"868b62aa5de54cbbe23ba2969c87c4b74b6a9118276fb5d58fd3194ba4728d09"}
Dec 05 16:05:38 crc kubenswrapper[4840]: I1205 16:05:38.059107 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6ddm9" podStartSLOduration=2.6392222309999998 podStartE2EDuration="5.059082767s" podCreationTimestamp="2025-12-05 16:05:33 +0000 UTC" firstStartedPulling="2025-12-05 16:05:34.980581113 +0000 UTC m=+4013.321643727" lastFinishedPulling="2025-12-05 16:05:37.400441649 +0000 UTC m=+4015.741504263" observedRunningTime="2025-12-05 16:05:38.050629576 +0000 UTC m=+4016.391692190" watchObservedRunningTime="2025-12-05 16:05:38.059082767 +0000 UTC m=+4016.400145381"
Dec 05 16:05:42 crc kubenswrapper[4840]: I1205 16:05:42.584179 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zjgkd"
Dec 05 16:05:42 crc kubenswrapper[4840]: I1205 16:05:42.584810 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zjgkd"
Dec 05 16:05:42 crc kubenswrapper[4840]: I1205 16:05:42.644567 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zjgkd"
Dec 05 16:05:43 crc kubenswrapper[4840]: I1205 16:05:43.303227 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zjgkd"
Dec 05 16:05:43 crc kubenswrapper[4840]: I1205 16:05:43.439645 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zjgkd"]
Dec 05 16:05:43 crc kubenswrapper[4840]: I1205 16:05:43.582322 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6ddm9"
Dec 05 16:05:43 crc kubenswrapper[4840]: I1205 16:05:43.582381 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6ddm9"
Dec 05 16:05:44 crc kubenswrapper[4840]: I1205 16:05:44.512420 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6ddm9"
Dec 05 16:05:44 crc kubenswrapper[4840]: I1205 16:05:44.567389 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6ddm9"
Dec 05 16:05:45 crc kubenswrapper[4840]: I1205 16:05:45.259912 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-zjgkd" podUID="154e7975-e7f2-42f7-9d3c-5415c24b12ae" containerName="registry-server" containerID="cri-o://b3c38d035d9d90210a2971ab0b28b58bd1a533e193a0ae21378f3297a3ae7f46" gracePeriod=2
Dec 05 16:05:45 crc kubenswrapper[4840]: I1205 16:05:45.969696 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6ddm9"]
Dec 05 16:05:46 crc kubenswrapper[4840]: I1205 16:05:46.272733 4840 generic.go:334] "Generic (PLEG): container finished" podID="154e7975-e7f2-42f7-9d3c-5415c24b12ae" containerID="b3c38d035d9d90210a2971ab0b28b58bd1a533e193a0ae21378f3297a3ae7f46" exitCode=0
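"Killing container with a grace period ... gracePeriod=2" above means: deliver SIGTERM, wait up to two seconds for the container to exit, then escalate to SIGKILL; here registry-server exits 0 inside the window. A sketch of those semantics (illustrative, not the CRI-O API):

    package main

    import (
    	"fmt"
    	"time"
    )

    // killWithGrace mimics the grace-period kill seen in the log: signal
    // termination, then race the container's exit against the deadline.
    func killWithGrace(name string, grace time.Duration, exited <-chan int) {
    	fmt.Printf("sending SIGTERM to %s\n", name)
    	select {
    	case code := <-exited:
    		fmt.Printf("%s exited with code %d within %v\n", name, code, grace)
    	case <-time.After(grace):
    		fmt.Printf("grace period elapsed; sending SIGKILL to %s\n", name)
    	}
    }

    func main() {
    	exited := make(chan int, 1)
    	exited <- 0 // registry-server above exits cleanly (exitCode=0)
    	killWithGrace("registry-server", 2*time.Second, exited)
    }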
Dec 05 16:05:46 crc kubenswrapper[4840]: I1205 16:05:46.272829 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zjgkd" event={"ID":"154e7975-e7f2-42f7-9d3c-5415c24b12ae","Type":"ContainerDied","Data":"b3c38d035d9d90210a2971ab0b28b58bd1a533e193a0ae21378f3297a3ae7f46"}
Dec 05 16:05:46 crc kubenswrapper[4840]: I1205 16:05:46.273260 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zjgkd" event={"ID":"154e7975-e7f2-42f7-9d3c-5415c24b12ae","Type":"ContainerDied","Data":"d71f3db9780cffed61bb899dcbf94c0b4b09a8c22ddf1ad0012704effa98b4af"}
Dec 05 16:05:46 crc kubenswrapper[4840]: I1205 16:05:46.273287 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d71f3db9780cffed61bb899dcbf94c0b4b09a8c22ddf1ad0012704effa98b4af"
Dec 05 16:05:46 crc kubenswrapper[4840]: I1205 16:05:46.273558 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6ddm9" podUID="c1281477-d43b-46e6-8604-96da41d2c380" containerName="registry-server" containerID="cri-o://868b62aa5de54cbbe23ba2969c87c4b74b6a9118276fb5d58fd3194ba4728d09" gracePeriod=2
Dec 05 16:05:46 crc kubenswrapper[4840]: I1205 16:05:46.674607 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zjgkd"
Dec 05 16:05:46 crc kubenswrapper[4840]: I1205 16:05:46.680643 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/154e7975-e7f2-42f7-9d3c-5415c24b12ae-utilities\") pod \"154e7975-e7f2-42f7-9d3c-5415c24b12ae\" (UID: \"154e7975-e7f2-42f7-9d3c-5415c24b12ae\") "
Dec 05 16:05:46 crc kubenswrapper[4840]: I1205 16:05:46.680741 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwnrx\" (UniqueName: \"kubernetes.io/projected/154e7975-e7f2-42f7-9d3c-5415c24b12ae-kube-api-access-nwnrx\") pod \"154e7975-e7f2-42f7-9d3c-5415c24b12ae\" (UID: \"154e7975-e7f2-42f7-9d3c-5415c24b12ae\") "
Dec 05 16:05:46 crc kubenswrapper[4840]: I1205 16:05:46.680784 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/154e7975-e7f2-42f7-9d3c-5415c24b12ae-catalog-content\") pod \"154e7975-e7f2-42f7-9d3c-5415c24b12ae\" (UID: \"154e7975-e7f2-42f7-9d3c-5415c24b12ae\") "
Dec 05 16:05:46 crc kubenswrapper[4840]: I1205 16:05:46.681973 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/154e7975-e7f2-42f7-9d3c-5415c24b12ae-utilities" (OuterVolumeSpecName: "utilities") pod "154e7975-e7f2-42f7-9d3c-5415c24b12ae" (UID: "154e7975-e7f2-42f7-9d3c-5415c24b12ae"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 16:05:46 crc kubenswrapper[4840]: I1205 16:05:46.688661 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/154e7975-e7f2-42f7-9d3c-5415c24b12ae-kube-api-access-nwnrx" (OuterVolumeSpecName: "kube-api-access-nwnrx") pod "154e7975-e7f2-42f7-9d3c-5415c24b12ae" (UID: "154e7975-e7f2-42f7-9d3c-5415c24b12ae"). InnerVolumeSpecName "kube-api-access-nwnrx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 16:05:46 crc kubenswrapper[4840]: I1205 16:05:46.705478 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/154e7975-e7f2-42f7-9d3c-5415c24b12ae-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "154e7975-e7f2-42f7-9d3c-5415c24b12ae" (UID: "154e7975-e7f2-42f7-9d3c-5415c24b12ae"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 16:05:46 crc kubenswrapper[4840]: I1205 16:05:46.782432 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/154e7975-e7f2-42f7-9d3c-5415c24b12ae-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 16:05:46 crc kubenswrapper[4840]: I1205 16:05:46.782482 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nwnrx\" (UniqueName: \"kubernetes.io/projected/154e7975-e7f2-42f7-9d3c-5415c24b12ae-kube-api-access-nwnrx\") on node \"crc\" DevicePath \"\""
Dec 05 16:05:46 crc kubenswrapper[4840]: I1205 16:05:46.782493 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/154e7975-e7f2-42f7-9d3c-5415c24b12ae-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 16:05:47 crc kubenswrapper[4840]: I1205 16:05:47.344018 4840 generic.go:334] "Generic (PLEG): container finished" podID="c1281477-d43b-46e6-8604-96da41d2c380" containerID="868b62aa5de54cbbe23ba2969c87c4b74b6a9118276fb5d58fd3194ba4728d09" exitCode=0
Dec 05 16:05:47 crc kubenswrapper[4840]: I1205 16:05:47.344214 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6ddm9" event={"ID":"c1281477-d43b-46e6-8604-96da41d2c380","Type":"ContainerDied","Data":"868b62aa5de54cbbe23ba2969c87c4b74b6a9118276fb5d58fd3194ba4728d09"}
Dec 05 16:05:47 crc kubenswrapper[4840]: I1205 16:05:47.344435 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zjgkd"
Dec 05 16:05:47 crc kubenswrapper[4840]: I1205 16:05:47.377683 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zjgkd"]
Dec 05 16:05:47 crc kubenswrapper[4840]: I1205 16:05:47.386726 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-zjgkd"]
Dec 05 16:05:48 crc kubenswrapper[4840]: I1205 16:05:48.041342 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6ddm9"
Dec 05 16:05:48 crc kubenswrapper[4840]: I1205 16:05:48.047493 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t7qb6\" (UniqueName: \"kubernetes.io/projected/c1281477-d43b-46e6-8604-96da41d2c380-kube-api-access-t7qb6\") pod \"c1281477-d43b-46e6-8604-96da41d2c380\" (UID: \"c1281477-d43b-46e6-8604-96da41d2c380\") "
Dec 05 16:05:48 crc kubenswrapper[4840]: I1205 16:05:48.047642 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1281477-d43b-46e6-8604-96da41d2c380-catalog-content\") pod \"c1281477-d43b-46e6-8604-96da41d2c380\" (UID: \"c1281477-d43b-46e6-8604-96da41d2c380\") "
Dec 05 16:05:48 crc kubenswrapper[4840]: I1205 16:05:48.047667 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1281477-d43b-46e6-8604-96da41d2c380-utilities\") pod \"c1281477-d43b-46e6-8604-96da41d2c380\" (UID: \"c1281477-d43b-46e6-8604-96da41d2c380\") "
Dec 05 16:05:48 crc kubenswrapper[4840]: I1205 16:05:48.049128 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1281477-d43b-46e6-8604-96da41d2c380-utilities" (OuterVolumeSpecName: "utilities") pod "c1281477-d43b-46e6-8604-96da41d2c380" (UID: "c1281477-d43b-46e6-8604-96da41d2c380"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 16:05:48 crc kubenswrapper[4840]: I1205 16:05:48.052743 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1281477-d43b-46e6-8604-96da41d2c380-kube-api-access-t7qb6" (OuterVolumeSpecName: "kube-api-access-t7qb6") pod "c1281477-d43b-46e6-8604-96da41d2c380" (UID: "c1281477-d43b-46e6-8604-96da41d2c380"). InnerVolumeSpecName "kube-api-access-t7qb6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 16:05:48 crc kubenswrapper[4840]: I1205 16:05:48.086430 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="154e7975-e7f2-42f7-9d3c-5415c24b12ae" path="/var/lib/kubelet/pods/154e7975-e7f2-42f7-9d3c-5415c24b12ae/volumes"
Dec 05 16:05:48 crc kubenswrapper[4840]: I1205 16:05:48.149778 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t7qb6\" (UniqueName: \"kubernetes.io/projected/c1281477-d43b-46e6-8604-96da41d2c380-kube-api-access-t7qb6\") on node \"crc\" DevicePath \"\""
Dec 05 16:05:48 crc kubenswrapper[4840]: I1205 16:05:48.150129 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1281477-d43b-46e6-8604-96da41d2c380-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 16:05:48 crc kubenswrapper[4840]: I1205 16:05:48.163672 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1281477-d43b-46e6-8604-96da41d2c380-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c1281477-d43b-46e6-8604-96da41d2c380" (UID: "c1281477-d43b-46e6-8604-96da41d2c380"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 16:05:48 crc kubenswrapper[4840]: I1205 16:05:48.252147 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1281477-d43b-46e6-8604-96da41d2c380-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 16:05:48 crc kubenswrapper[4840]: I1205 16:05:48.359378 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6ddm9" event={"ID":"c1281477-d43b-46e6-8604-96da41d2c380","Type":"ContainerDied","Data":"6b1b64c302b7464e603e471de5268125a152c594f693832538a90d6c3d87d817"}
Dec 05 16:05:48 crc kubenswrapper[4840]: I1205 16:05:48.359461 4840 scope.go:117] "RemoveContainer" containerID="868b62aa5de54cbbe23ba2969c87c4b74b6a9118276fb5d58fd3194ba4728d09"
Dec 05 16:05:48 crc kubenswrapper[4840]: I1205 16:05:48.359659 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6ddm9"
Dec 05 16:05:48 crc kubenswrapper[4840]: I1205 16:05:48.382315 4840 scope.go:117] "RemoveContainer" containerID="e58a1fb94f5357614bc8b769f01b4969ce428c090d63ff6efd505aa53789a84f"
Dec 05 16:05:48 crc kubenswrapper[4840]: I1205 16:05:48.404569 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6ddm9"]
Dec 05 16:05:48 crc kubenswrapper[4840]: I1205 16:05:48.611659 4840 scope.go:117] "RemoveContainer" containerID="f2c5805e013a3d6cbb09842b540c51ab4550d4533eff1cb077e93b490427fe81"
Dec 05 16:05:48 crc kubenswrapper[4840]: I1205 16:05:48.623884 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6ddm9"]
Dec 05 16:05:50 crc kubenswrapper[4840]: I1205 16:05:50.077190 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1281477-d43b-46e6-8604-96da41d2c380" path="/var/lib/kubelet/pods/c1281477-d43b-46e6-8604-96da41d2c380/volumes"
Dec 05 16:06:15 crc kubenswrapper[4840]: I1205 16:06:15.020846 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-dpph9/must-gather-mdm2k"]
Dec 05 16:06:15 crc kubenswrapper[4840]: E1205 16:06:15.021907 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="154e7975-e7f2-42f7-9d3c-5415c24b12ae" containerName="registry-server"
Dec 05 16:06:15 crc kubenswrapper[4840]: I1205 16:06:15.021927 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="154e7975-e7f2-42f7-9d3c-5415c24b12ae" containerName="registry-server"
Dec 05 16:06:15 crc kubenswrapper[4840]: E1205 16:06:15.021947 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="154e7975-e7f2-42f7-9d3c-5415c24b12ae" containerName="extract-content"
Dec 05 16:06:15 crc kubenswrapper[4840]: I1205 16:06:15.021953 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="154e7975-e7f2-42f7-9d3c-5415c24b12ae" containerName="extract-content"
Dec 05 16:06:15 crc kubenswrapper[4840]: E1205 16:06:15.021969 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="154e7975-e7f2-42f7-9d3c-5415c24b12ae" containerName="extract-utilities"
Dec 05 16:06:15 crc kubenswrapper[4840]: I1205 16:06:15.021976 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="154e7975-e7f2-42f7-9d3c-5415c24b12ae" containerName="extract-utilities"
Dec 05 16:06:15 crc kubenswrapper[4840]: E1205 16:06:15.021990 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1281477-d43b-46e6-8604-96da41d2c380" containerName="extract-utilities"
Dec 05 16:06:15 crc kubenswrapper[4840]: I1205 16:06:15.021996 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1281477-d43b-46e6-8604-96da41d2c380" containerName="extract-utilities"
Dec 05 16:06:15 crc kubenswrapper[4840]: E1205 16:06:15.022023 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1281477-d43b-46e6-8604-96da41d2c380" containerName="extract-content"
Dec 05 16:06:15 crc kubenswrapper[4840]: I1205 16:06:15.022029 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1281477-d43b-46e6-8604-96da41d2c380" containerName="extract-content"
Dec 05 16:06:15 crc kubenswrapper[4840]: E1205 16:06:15.022038 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1281477-d43b-46e6-8604-96da41d2c380" containerName="registry-server"
Dec 05 16:06:15 crc kubenswrapper[4840]: I1205 16:06:15.022044 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1281477-d43b-46e6-8604-96da41d2c380" containerName="registry-server"
Dec 05 16:06:15 crc kubenswrapper[4840]: I1205 16:06:15.022270 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="154e7975-e7f2-42f7-9d3c-5415c24b12ae" containerName="registry-server"
Dec 05 16:06:15 crc kubenswrapper[4840]: I1205 16:06:15.022282 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1281477-d43b-46e6-8604-96da41d2c380" containerName="registry-server"
Dec 05 16:06:15 crc kubenswrapper[4840]: I1205 16:06:15.023558 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dpph9/must-gather-mdm2k"
Dec 05 16:06:15 crc kubenswrapper[4840]: I1205 16:06:15.025704 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-dpph9"/"kube-root-ca.crt"
Dec 05 16:06:15 crc kubenswrapper[4840]: I1205 16:06:15.025735 4840 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-dpph9"/"openshift-service-ca.crt"
Dec 05 16:06:15 crc kubenswrapper[4840]: I1205 16:06:15.026265 4840 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-dpph9"/"default-dockercfg-mscpj"
Dec 05 16:06:15 crc kubenswrapper[4840]: I1205 16:06:15.040443 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-dpph9/must-gather-mdm2k"]
Dec 05 16:06:15 crc kubenswrapper[4840]: I1205 16:06:15.140849 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvwnw\" (UniqueName: \"kubernetes.io/projected/54f86601-22c7-465e-8408-510506f3e79e-kube-api-access-pvwnw\") pod \"must-gather-mdm2k\" (UID: \"54f86601-22c7-465e-8408-510506f3e79e\") " pod="openshift-must-gather-dpph9/must-gather-mdm2k"
Dec 05 16:06:15 crc kubenswrapper[4840]: I1205 16:06:15.143820 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/54f86601-22c7-465e-8408-510506f3e79e-must-gather-output\") pod \"must-gather-mdm2k\" (UID: \"54f86601-22c7-465e-8408-510506f3e79e\") " pod="openshift-must-gather-dpph9/must-gather-mdm2k"
Dec 05 16:06:15 crc kubenswrapper[4840]: I1205 16:06:15.245149 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvwnw\" (UniqueName: \"kubernetes.io/projected/54f86601-22c7-465e-8408-510506f3e79e-kube-api-access-pvwnw\") pod \"must-gather-mdm2k\" (UID: \"54f86601-22c7-465e-8408-510506f3e79e\") " pod="openshift-must-gather-dpph9/must-gather-mdm2k"
Dec 05 16:06:15 crc kubenswrapper[4840]: I1205 16:06:15.245284 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/54f86601-22c7-465e-8408-510506f3e79e-must-gather-output\") pod \"must-gather-mdm2k\" (UID: \"54f86601-22c7-465e-8408-510506f3e79e\") " pod="openshift-must-gather-dpph9/must-gather-mdm2k"
Dec 05 16:06:15 crc kubenswrapper[4840]: I1205 16:06:15.246109 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/54f86601-22c7-465e-8408-510506f3e79e-must-gather-output\") pod \"must-gather-mdm2k\" (UID: \"54f86601-22c7-465e-8408-510506f3e79e\") " pod="openshift-must-gather-dpph9/must-gather-mdm2k"
Dec 05 16:06:15 crc kubenswrapper[4840]: I1205 16:06:15.272657 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvwnw\" (UniqueName: \"kubernetes.io/projected/54f86601-22c7-465e-8408-510506f3e79e-kube-api-access-pvwnw\") pod \"must-gather-mdm2k\" (UID: \"54f86601-22c7-465e-8408-510506f3e79e\") " pod="openshift-must-gather-dpph9/must-gather-mdm2k"
Dec 05 16:06:15 crc kubenswrapper[4840]: I1205 16:06:15.351385 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dpph9/must-gather-mdm2k"
Dec 05 16:06:15 crc kubenswrapper[4840]: I1205 16:06:15.794025 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-dpph9/must-gather-mdm2k"]
Dec 05 16:06:16 crc kubenswrapper[4840]: I1205 16:06:16.664462 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dpph9/must-gather-mdm2k" event={"ID":"54f86601-22c7-465e-8408-510506f3e79e","Type":"ContainerStarted","Data":"304b8ddfa57557777cfbca4aedb1da2c31997ca475528c6e93ae70d699b7ad72"}
Dec 05 16:06:16 crc kubenswrapper[4840]: I1205 16:06:16.665045 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dpph9/must-gather-mdm2k" event={"ID":"54f86601-22c7-465e-8408-510506f3e79e","Type":"ContainerStarted","Data":"ca4366974bcf4526022cbbbac553a21b0c56e3d81a221c0514f8dfbfe1c09234"}
Dec 05 16:06:16 crc kubenswrapper[4840]: I1205 16:06:16.665064 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dpph9/must-gather-mdm2k" event={"ID":"54f86601-22c7-465e-8408-510506f3e79e","Type":"ContainerStarted","Data":"a4962053f279b04c791b99667781053e362765767f3b7af2041aaab046da0a57"}
Dec 05 16:06:16 crc kubenswrapper[4840]: I1205 16:06:16.682437 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-dpph9/must-gather-mdm2k" podStartSLOduration=2.6824167450000003 podStartE2EDuration="2.682416745s" podCreationTimestamp="2025-12-05 16:06:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 16:06:16.676566738 +0000 UTC m=+4055.017629352" watchObservedRunningTime="2025-12-05 16:06:16.682416745 +0000 UTC m=+4055.023479359"
Dec 05 16:06:19 crc kubenswrapper[4840]: I1205 16:06:19.650789 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-dpph9/crc-debug-vshqn"]
Dec 05 16:06:19 crc kubenswrapper[4840]: I1205 16:06:19.652705 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dpph9/crc-debug-vshqn"
Dec 05 16:06:19 crc kubenswrapper[4840]: I1205 16:06:19.731055 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svkcx\" (UniqueName: \"kubernetes.io/projected/718d864a-54b1-4e26-8895-bad2a20ae321-kube-api-access-svkcx\") pod \"crc-debug-vshqn\" (UID: \"718d864a-54b1-4e26-8895-bad2a20ae321\") " pod="openshift-must-gather-dpph9/crc-debug-vshqn"
Dec 05 16:06:19 crc kubenswrapper[4840]: I1205 16:06:19.731424 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/718d864a-54b1-4e26-8895-bad2a20ae321-host\") pod \"crc-debug-vshqn\" (UID: \"718d864a-54b1-4e26-8895-bad2a20ae321\") " pod="openshift-must-gather-dpph9/crc-debug-vshqn"
Dec 05 16:06:19 crc kubenswrapper[4840]: I1205 16:06:19.832394 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svkcx\" (UniqueName: \"kubernetes.io/projected/718d864a-54b1-4e26-8895-bad2a20ae321-kube-api-access-svkcx\") pod \"crc-debug-vshqn\" (UID: \"718d864a-54b1-4e26-8895-bad2a20ae321\") " pod="openshift-must-gather-dpph9/crc-debug-vshqn"
Dec 05 16:06:19 crc kubenswrapper[4840]: I1205 16:06:19.832467 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/718d864a-54b1-4e26-8895-bad2a20ae321-host\") pod \"crc-debug-vshqn\" (UID: \"718d864a-54b1-4e26-8895-bad2a20ae321\") " pod="openshift-must-gather-dpph9/crc-debug-vshqn"
Dec 05 16:06:19 crc kubenswrapper[4840]: I1205 16:06:19.832607 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/718d864a-54b1-4e26-8895-bad2a20ae321-host\") pod \"crc-debug-vshqn\" (UID: \"718d864a-54b1-4e26-8895-bad2a20ae321\") " pod="openshift-must-gather-dpph9/crc-debug-vshqn"
Dec 05 16:06:20 crc kubenswrapper[4840]: I1205 16:06:20.271877 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svkcx\" (UniqueName: \"kubernetes.io/projected/718d864a-54b1-4e26-8895-bad2a20ae321-kube-api-access-svkcx\") pod \"crc-debug-vshqn\" (UID: \"718d864a-54b1-4e26-8895-bad2a20ae321\") " pod="openshift-must-gather-dpph9/crc-debug-vshqn"
Dec 05 16:06:20 crc kubenswrapper[4840]: I1205 16:06:20.277261 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dpph9/crc-debug-vshqn"
Dec 05 16:06:20 crc kubenswrapper[4840]: I1205 16:06:20.701100 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dpph9/crc-debug-vshqn" event={"ID":"718d864a-54b1-4e26-8895-bad2a20ae321","Type":"ContainerStarted","Data":"db7cd54d131aa37cdf7493dc6d0a29975832f2d92fe55f680b9a4cbd4aa10600"}
Dec 05 16:06:20 crc kubenswrapper[4840]: I1205 16:06:20.701666 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dpph9/crc-debug-vshqn" event={"ID":"718d864a-54b1-4e26-8895-bad2a20ae321","Type":"ContainerStarted","Data":"f3ec4cef8f2280d33185056f20541bc202b37be1ab39a555b42c19f1b42b4c11"}
Dec 05 16:06:20 crc kubenswrapper[4840]: I1205 16:06:20.718379 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-dpph9/crc-debug-vshqn" podStartSLOduration=1.7183576299999999 podStartE2EDuration="1.71835763s" podCreationTimestamp="2025-12-05 16:06:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 16:06:20.713706047 +0000 UTC m=+4059.054768661" watchObservedRunningTime="2025-12-05 16:06:20.71835763 +0000 UTC m=+4059.059420244"
Dec 05 16:06:49 crc kubenswrapper[4840]: I1205 16:06:49.473105 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 16:06:49 crc kubenswrapper[4840]: I1205 16:06:49.473489 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 16:06:53 crc kubenswrapper[4840]: W1205 16:06:53.359860 4840 helpers.go:245] readString: Failed to read "/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod718d864a_54b1_4e26_8895_bad2a20ae321.slice/crio-conmon-db7cd54d131aa37cdf7493dc6d0a29975832f2d92fe55f680b9a4cbd4aa10600.scope/cpuset.cpus.effective": read /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod718d864a_54b1_4e26_8895_bad2a20ae321.slice/crio-conmon-db7cd54d131aa37cdf7493dc6d0a29975832f2d92fe55f680b9a4cbd4aa10600.scope/cpuset.cpus.effective: no such device
Dec 05 16:06:53 crc kubenswrapper[4840]: E1205 16:06:53.470135 4840 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod718d864a_54b1_4e26_8895_bad2a20ae321.slice/crio-db7cd54d131aa37cdf7493dc6d0a29975832f2d92fe55f680b9a4cbd4aa10600.scope\": RecentStats: unable to find data in memory cache]"
Dec 05 16:06:54 crc kubenswrapper[4840]: I1205 16:06:54.229730 4840 generic.go:334] "Generic (PLEG): container finished" podID="718d864a-54b1-4e26-8895-bad2a20ae321" containerID="db7cd54d131aa37cdf7493dc6d0a29975832f2d92fe55f680b9a4cbd4aa10600" exitCode=0
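The liveness failure above is a plain HTTP GET against the daemon's health endpoint; a transport error such as "connection refused" counts as a probe failure just like a bad status code. A sketch, with the URL taken from the log and the one-second timeout an assumption:

    package main

    import (
    	"fmt"
    	"net/http"
    	"time"
    )

    // probe performs the HTTP liveness check failing in the log: any
    // transport error or non-2xx status is reported as a failure.
    func probe(url string) error {
    	client := &http.Client{Timeout: time.Second} // timeout is assumed, not logged
    	resp, err := client.Get(url)
    	if err != nil {
    		return err // e.g. "connect: connection refused" while the daemon is down
    	}
    	defer resp.Body.Close()
    	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
    		return fmt.Errorf("unexpected status %d", resp.StatusCode)
    	}
    	return nil
    }

    func main() {
    	if err := probe("http://127.0.0.1:8798/health"); err != nil {
    		fmt.Println("Probe failed:", err)
    	}
    }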
Dec 05 16:06:54 crc kubenswrapper[4840]: I1205 16:06:54.229808 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dpph9/crc-debug-vshqn" event={"ID":"718d864a-54b1-4e26-8895-bad2a20ae321","Type":"ContainerDied","Data":"db7cd54d131aa37cdf7493dc6d0a29975832f2d92fe55f680b9a4cbd4aa10600"}
Dec 05 16:06:55 crc kubenswrapper[4840]: I1205 16:06:55.351348 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dpph9/crc-debug-vshqn"
Dec 05 16:06:55 crc kubenswrapper[4840]: I1205 16:06:55.394460 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-dpph9/crc-debug-vshqn"]
Dec 05 16:06:55 crc kubenswrapper[4840]: I1205 16:06:55.402822 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/718d864a-54b1-4e26-8895-bad2a20ae321-host\") pod \"718d864a-54b1-4e26-8895-bad2a20ae321\" (UID: \"718d864a-54b1-4e26-8895-bad2a20ae321\") "
Dec 05 16:06:55 crc kubenswrapper[4840]: I1205 16:06:55.402965 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/718d864a-54b1-4e26-8895-bad2a20ae321-host" (OuterVolumeSpecName: "host") pod "718d864a-54b1-4e26-8895-bad2a20ae321" (UID: "718d864a-54b1-4e26-8895-bad2a20ae321"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 16:06:55 crc kubenswrapper[4840]: I1205 16:06:55.403055 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svkcx\" (UniqueName: \"kubernetes.io/projected/718d864a-54b1-4e26-8895-bad2a20ae321-kube-api-access-svkcx\") pod \"718d864a-54b1-4e26-8895-bad2a20ae321\" (UID: \"718d864a-54b1-4e26-8895-bad2a20ae321\") "
Dec 05 16:06:55 crc kubenswrapper[4840]: I1205 16:06:55.403532 4840 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/718d864a-54b1-4e26-8895-bad2a20ae321-host\") on node \"crc\" DevicePath \"\""
Dec 05 16:06:55 crc kubenswrapper[4840]: I1205 16:06:55.404213 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-dpph9/crc-debug-vshqn"]
Dec 05 16:06:55 crc kubenswrapper[4840]: I1205 16:06:55.419411 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/718d864a-54b1-4e26-8895-bad2a20ae321-kube-api-access-svkcx" (OuterVolumeSpecName: "kube-api-access-svkcx") pod "718d864a-54b1-4e26-8895-bad2a20ae321" (UID: "718d864a-54b1-4e26-8895-bad2a20ae321"). InnerVolumeSpecName "kube-api-access-svkcx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 16:06:55 crc kubenswrapper[4840]: I1205 16:06:55.505345 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svkcx\" (UniqueName: \"kubernetes.io/projected/718d864a-54b1-4e26-8895-bad2a20ae321-kube-api-access-svkcx\") on node \"crc\" DevicePath \"\""
Dec 05 16:06:56 crc kubenswrapper[4840]: I1205 16:06:56.077432 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="718d864a-54b1-4e26-8895-bad2a20ae321" path="/var/lib/kubelet/pods/718d864a-54b1-4e26-8895-bad2a20ae321/volumes"
Dec 05 16:06:56 crc kubenswrapper[4840]: I1205 16:06:56.247941 4840 scope.go:117] "RemoveContainer" containerID="db7cd54d131aa37cdf7493dc6d0a29975832f2d92fe55f680b9a4cbd4aa10600"
Dec 05 16:06:56 crc kubenswrapper[4840]: I1205 16:06:56.248047 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dpph9/crc-debug-vshqn"
Dec 05 16:06:56 crc kubenswrapper[4840]: I1205 16:06:56.566804 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-dpph9/crc-debug-g77rt"]
Dec 05 16:06:56 crc kubenswrapper[4840]: E1205 16:06:56.567601 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="718d864a-54b1-4e26-8895-bad2a20ae321" containerName="container-00"
Dec 05 16:06:56 crc kubenswrapper[4840]: I1205 16:06:56.567620 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="718d864a-54b1-4e26-8895-bad2a20ae321" containerName="container-00"
Dec 05 16:06:56 crc kubenswrapper[4840]: I1205 16:06:56.567923 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="718d864a-54b1-4e26-8895-bad2a20ae321" containerName="container-00"
Dec 05 16:06:56 crc kubenswrapper[4840]: I1205 16:06:56.568700 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dpph9/crc-debug-g77rt"
Dec 05 16:06:56 crc kubenswrapper[4840]: I1205 16:06:56.624930 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkmn8\" (UniqueName: \"kubernetes.io/projected/1518c47b-9e34-453e-b38b-79e484830307-kube-api-access-jkmn8\") pod \"crc-debug-g77rt\" (UID: \"1518c47b-9e34-453e-b38b-79e484830307\") " pod="openshift-must-gather-dpph9/crc-debug-g77rt"
Dec 05 16:06:56 crc kubenswrapper[4840]: I1205 16:06:56.625150 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1518c47b-9e34-453e-b38b-79e484830307-host\") pod \"crc-debug-g77rt\" (UID: \"1518c47b-9e34-453e-b38b-79e484830307\") " pod="openshift-must-gather-dpph9/crc-debug-g77rt"
Dec 05 16:06:56 crc kubenswrapper[4840]: I1205 16:06:56.726708 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkmn8\" (UniqueName: \"kubernetes.io/projected/1518c47b-9e34-453e-b38b-79e484830307-kube-api-access-jkmn8\") pod \"crc-debug-g77rt\" (UID: \"1518c47b-9e34-453e-b38b-79e484830307\") " pod="openshift-must-gather-dpph9/crc-debug-g77rt"
Dec 05 16:06:56 crc kubenswrapper[4840]: I1205 16:06:56.726820 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1518c47b-9e34-453e-b38b-79e484830307-host\") pod \"crc-debug-g77rt\" (UID: \"1518c47b-9e34-453e-b38b-79e484830307\") " pod="openshift-must-gather-dpph9/crc-debug-g77rt"
Dec 05 16:06:56 crc kubenswrapper[4840]: I1205 16:06:56.727083 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1518c47b-9e34-453e-b38b-79e484830307-host\") pod \"crc-debug-g77rt\" (UID: \"1518c47b-9e34-453e-b38b-79e484830307\") " pod="openshift-must-gather-dpph9/crc-debug-g77rt"
Dec 05 16:06:56 crc kubenswrapper[4840]: I1205 16:06:56.756731 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkmn8\" (UniqueName: \"kubernetes.io/projected/1518c47b-9e34-453e-b38b-79e484830307-kube-api-access-jkmn8\") pod \"crc-debug-g77rt\" (UID: \"1518c47b-9e34-453e-b38b-79e484830307\") " pod="openshift-must-gather-dpph9/crc-debug-g77rt"
Need to start a new one" pod="openshift-must-gather-dpph9/crc-debug-g77rt" Dec 05 16:06:57 crc kubenswrapper[4840]: I1205 16:06:57.259806 4840 generic.go:334] "Generic (PLEG): container finished" podID="1518c47b-9e34-453e-b38b-79e484830307" containerID="c2d313afb5887ebc7b946cc95a3f8257f29486a32c67e8bcb539fdc32554353b" exitCode=0 Dec 05 16:06:57 crc kubenswrapper[4840]: I1205 16:06:57.259895 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dpph9/crc-debug-g77rt" event={"ID":"1518c47b-9e34-453e-b38b-79e484830307","Type":"ContainerDied","Data":"c2d313afb5887ebc7b946cc95a3f8257f29486a32c67e8bcb539fdc32554353b"} Dec 05 16:06:57 crc kubenswrapper[4840]: I1205 16:06:57.260232 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dpph9/crc-debug-g77rt" event={"ID":"1518c47b-9e34-453e-b38b-79e484830307","Type":"ContainerStarted","Data":"b98092104bc5cfa300555612a15ea66950b115001a7c1c12127f76f6758bc7ec"} Dec 05 16:06:57 crc kubenswrapper[4840]: I1205 16:06:57.715052 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-dpph9/crc-debug-g77rt"] Dec 05 16:06:57 crc kubenswrapper[4840]: I1205 16:06:57.723702 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-dpph9/crc-debug-g77rt"] Dec 05 16:06:58 crc kubenswrapper[4840]: I1205 16:06:58.367718 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dpph9/crc-debug-g77rt" Dec 05 16:06:58 crc kubenswrapper[4840]: I1205 16:06:58.483010 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1518c47b-9e34-453e-b38b-79e484830307-host\") pod \"1518c47b-9e34-453e-b38b-79e484830307\" (UID: \"1518c47b-9e34-453e-b38b-79e484830307\") " Dec 05 16:06:58 crc kubenswrapper[4840]: I1205 16:06:58.483137 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkmn8\" (UniqueName: \"kubernetes.io/projected/1518c47b-9e34-453e-b38b-79e484830307-kube-api-access-jkmn8\") pod \"1518c47b-9e34-453e-b38b-79e484830307\" (UID: \"1518c47b-9e34-453e-b38b-79e484830307\") " Dec 05 16:06:58 crc kubenswrapper[4840]: I1205 16:06:58.483561 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1518c47b-9e34-453e-b38b-79e484830307-host" (OuterVolumeSpecName: "host") pod "1518c47b-9e34-453e-b38b-79e484830307" (UID: "1518c47b-9e34-453e-b38b-79e484830307"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 16:06:58 crc kubenswrapper[4840]: I1205 16:06:58.498200 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1518c47b-9e34-453e-b38b-79e484830307-kube-api-access-jkmn8" (OuterVolumeSpecName: "kube-api-access-jkmn8") pod "1518c47b-9e34-453e-b38b-79e484830307" (UID: "1518c47b-9e34-453e-b38b-79e484830307"). InnerVolumeSpecName "kube-api-access-jkmn8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 16:06:58 crc kubenswrapper[4840]: I1205 16:06:58.585208 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkmn8\" (UniqueName: \"kubernetes.io/projected/1518c47b-9e34-453e-b38b-79e484830307-kube-api-access-jkmn8\") on node \"crc\" DevicePath \"\"" Dec 05 16:06:58 crc kubenswrapper[4840]: I1205 16:06:58.585251 4840 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1518c47b-9e34-453e-b38b-79e484830307-host\") on node \"crc\" DevicePath \"\"" Dec 05 16:06:58 crc kubenswrapper[4840]: I1205 16:06:58.972971 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-dpph9/crc-debug-4h4x2"] Dec 05 16:06:58 crc kubenswrapper[4840]: E1205 16:06:58.973838 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1518c47b-9e34-453e-b38b-79e484830307" containerName="container-00" Dec 05 16:06:58 crc kubenswrapper[4840]: I1205 16:06:58.973878 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="1518c47b-9e34-453e-b38b-79e484830307" containerName="container-00" Dec 05 16:06:58 crc kubenswrapper[4840]: I1205 16:06:58.974148 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="1518c47b-9e34-453e-b38b-79e484830307" containerName="container-00" Dec 05 16:06:58 crc kubenswrapper[4840]: I1205 16:06:58.975008 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dpph9/crc-debug-4h4x2" Dec 05 16:06:58 crc kubenswrapper[4840]: I1205 16:06:58.993611 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bcb6db80-9c7f-47bd-991e-b6ef012ee58f-host\") pod \"crc-debug-4h4x2\" (UID: \"bcb6db80-9c7f-47bd-991e-b6ef012ee58f\") " pod="openshift-must-gather-dpph9/crc-debug-4h4x2" Dec 05 16:06:58 crc kubenswrapper[4840]: I1205 16:06:58.993727 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htj2b\" (UniqueName: \"kubernetes.io/projected/bcb6db80-9c7f-47bd-991e-b6ef012ee58f-kube-api-access-htj2b\") pod \"crc-debug-4h4x2\" (UID: \"bcb6db80-9c7f-47bd-991e-b6ef012ee58f\") " pod="openshift-must-gather-dpph9/crc-debug-4h4x2" Dec 05 16:06:59 crc kubenswrapper[4840]: I1205 16:06:59.095354 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bcb6db80-9c7f-47bd-991e-b6ef012ee58f-host\") pod \"crc-debug-4h4x2\" (UID: \"bcb6db80-9c7f-47bd-991e-b6ef012ee58f\") " pod="openshift-must-gather-dpph9/crc-debug-4h4x2" Dec 05 16:06:59 crc kubenswrapper[4840]: I1205 16:06:59.095433 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bcb6db80-9c7f-47bd-991e-b6ef012ee58f-host\") pod \"crc-debug-4h4x2\" (UID: \"bcb6db80-9c7f-47bd-991e-b6ef012ee58f\") " pod="openshift-must-gather-dpph9/crc-debug-4h4x2" Dec 05 16:06:59 crc kubenswrapper[4840]: I1205 16:06:59.095709 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htj2b\" (UniqueName: \"kubernetes.io/projected/bcb6db80-9c7f-47bd-991e-b6ef012ee58f-kube-api-access-htj2b\") pod \"crc-debug-4h4x2\" (UID: \"bcb6db80-9c7f-47bd-991e-b6ef012ee58f\") " pod="openshift-must-gather-dpph9/crc-debug-4h4x2" Dec 05 16:06:59 crc kubenswrapper[4840]: I1205 16:06:59.114569 4840 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-htj2b\" (UniqueName: \"kubernetes.io/projected/bcb6db80-9c7f-47bd-991e-b6ef012ee58f-kube-api-access-htj2b\") pod \"crc-debug-4h4x2\" (UID: \"bcb6db80-9c7f-47bd-991e-b6ef012ee58f\") " pod="openshift-must-gather-dpph9/crc-debug-4h4x2" Dec 05 16:06:59 crc kubenswrapper[4840]: I1205 16:06:59.279183 4840 scope.go:117] "RemoveContainer" containerID="c2d313afb5887ebc7b946cc95a3f8257f29486a32c67e8bcb539fdc32554353b" Dec 05 16:06:59 crc kubenswrapper[4840]: I1205 16:06:59.279199 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dpph9/crc-debug-g77rt" Dec 05 16:06:59 crc kubenswrapper[4840]: I1205 16:06:59.291677 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dpph9/crc-debug-4h4x2" Dec 05 16:06:59 crc kubenswrapper[4840]: W1205 16:06:59.335702 4840 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbcb6db80_9c7f_47bd_991e_b6ef012ee58f.slice/crio-eb64ca920f9eeacd58bf29f73d2e172a61a28784eef6efe4d2d5455cf3ed5c5b WatchSource:0}: Error finding container eb64ca920f9eeacd58bf29f73d2e172a61a28784eef6efe4d2d5455cf3ed5c5b: Status 404 returned error can't find the container with id eb64ca920f9eeacd58bf29f73d2e172a61a28784eef6efe4d2d5455cf3ed5c5b Dec 05 16:07:00 crc kubenswrapper[4840]: I1205 16:07:00.082904 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1518c47b-9e34-453e-b38b-79e484830307" path="/var/lib/kubelet/pods/1518c47b-9e34-453e-b38b-79e484830307/volumes" Dec 05 16:07:00 crc kubenswrapper[4840]: I1205 16:07:00.293776 4840 generic.go:334] "Generic (PLEG): container finished" podID="bcb6db80-9c7f-47bd-991e-b6ef012ee58f" containerID="79776e0a7b4319435b35338b0a76b5569a56dbafdfce438b5ead7c558fd1bce0" exitCode=0 Dec 05 16:07:00 crc kubenswrapper[4840]: I1205 16:07:00.293854 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dpph9/crc-debug-4h4x2" event={"ID":"bcb6db80-9c7f-47bd-991e-b6ef012ee58f","Type":"ContainerDied","Data":"79776e0a7b4319435b35338b0a76b5569a56dbafdfce438b5ead7c558fd1bce0"} Dec 05 16:07:00 crc kubenswrapper[4840]: I1205 16:07:00.294147 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dpph9/crc-debug-4h4x2" event={"ID":"bcb6db80-9c7f-47bd-991e-b6ef012ee58f","Type":"ContainerStarted","Data":"eb64ca920f9eeacd58bf29f73d2e172a61a28784eef6efe4d2d5455cf3ed5c5b"} Dec 05 16:07:00 crc kubenswrapper[4840]: I1205 16:07:00.342323 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-dpph9/crc-debug-4h4x2"] Dec 05 16:07:00 crc kubenswrapper[4840]: I1205 16:07:00.358887 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-dpph9/crc-debug-4h4x2"] Dec 05 16:07:01 crc kubenswrapper[4840]: I1205 16:07:01.411438 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-dpph9/crc-debug-4h4x2" Dec 05 16:07:01 crc kubenswrapper[4840]: I1205 16:07:01.556352 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bcb6db80-9c7f-47bd-991e-b6ef012ee58f-host\") pod \"bcb6db80-9c7f-47bd-991e-b6ef012ee58f\" (UID: \"bcb6db80-9c7f-47bd-991e-b6ef012ee58f\") " Dec 05 16:07:01 crc kubenswrapper[4840]: I1205 16:07:01.556540 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bcb6db80-9c7f-47bd-991e-b6ef012ee58f-host" (OuterVolumeSpecName: "host") pod "bcb6db80-9c7f-47bd-991e-b6ef012ee58f" (UID: "bcb6db80-9c7f-47bd-991e-b6ef012ee58f"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 16:07:01 crc kubenswrapper[4840]: I1205 16:07:01.556568 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htj2b\" (UniqueName: \"kubernetes.io/projected/bcb6db80-9c7f-47bd-991e-b6ef012ee58f-kube-api-access-htj2b\") pod \"bcb6db80-9c7f-47bd-991e-b6ef012ee58f\" (UID: \"bcb6db80-9c7f-47bd-991e-b6ef012ee58f\") " Dec 05 16:07:01 crc kubenswrapper[4840]: I1205 16:07:01.557208 4840 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bcb6db80-9c7f-47bd-991e-b6ef012ee58f-host\") on node \"crc\" DevicePath \"\"" Dec 05 16:07:02 crc kubenswrapper[4840]: I1205 16:07:02.262080 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcb6db80-9c7f-47bd-991e-b6ef012ee58f-kube-api-access-htj2b" (OuterVolumeSpecName: "kube-api-access-htj2b") pod "bcb6db80-9c7f-47bd-991e-b6ef012ee58f" (UID: "bcb6db80-9c7f-47bd-991e-b6ef012ee58f"). InnerVolumeSpecName "kube-api-access-htj2b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 16:07:02 crc kubenswrapper[4840]: I1205 16:07:02.285072 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htj2b\" (UniqueName: \"kubernetes.io/projected/bcb6db80-9c7f-47bd-991e-b6ef012ee58f-kube-api-access-htj2b\") on node \"crc\" DevicePath \"\"" Dec 05 16:07:02 crc kubenswrapper[4840]: I1205 16:07:02.314907 4840 scope.go:117] "RemoveContainer" containerID="79776e0a7b4319435b35338b0a76b5569a56dbafdfce438b5ead7c558fd1bce0" Dec 05 16:07:02 crc kubenswrapper[4840]: I1205 16:07:02.314976 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-dpph9/crc-debug-4h4x2" Dec 05 16:07:04 crc kubenswrapper[4840]: I1205 16:07:04.078458 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bcb6db80-9c7f-47bd-991e-b6ef012ee58f" path="/var/lib/kubelet/pods/bcb6db80-9c7f-47bd-991e-b6ef012ee58f/volumes" Dec 05 16:07:19 crc kubenswrapper[4840]: I1205 16:07:19.555088 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 16:07:19 crc kubenswrapper[4840]: I1205 16:07:19.555640 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 16:07:30 crc kubenswrapper[4840]: I1205 16:07:30.377987 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-897596894-64sgb_9eb8c11f-7d8a-4330-804c-e9fa74cd10e7/barbican-api/0.log" Dec 05 16:07:30 crc kubenswrapper[4840]: I1205 16:07:30.668712 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-897596894-64sgb_9eb8c11f-7d8a-4330-804c-e9fa74cd10e7/barbican-api-log/0.log" Dec 05 16:07:30 crc kubenswrapper[4840]: I1205 16:07:30.700830 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-57cfd5878-qwpfg_7002d387-4756-4e68-b238-6e9cbf1d9b10/barbican-keystone-listener/0.log" Dec 05 16:07:30 crc kubenswrapper[4840]: I1205 16:07:30.763782 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-57cfd5878-qwpfg_7002d387-4756-4e68-b238-6e9cbf1d9b10/barbican-keystone-listener-log/0.log" Dec 05 16:07:30 crc kubenswrapper[4840]: I1205 16:07:30.905715 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-67bdc7dbc-8r6xf_9670574b-79d6-495e-abc8-123bf1582742/barbican-worker/0.log" Dec 05 16:07:30 crc kubenswrapper[4840]: I1205 16:07:30.951698 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-67bdc7dbc-8r6xf_9670574b-79d6-495e-abc8-123bf1582742/barbican-worker-log/0.log" Dec 05 16:07:31 crc kubenswrapper[4840]: I1205 16:07:31.152636 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-h48pt_3298a054-72de-4060-95c4-ff42a8ed3a7f/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 16:07:31 crc kubenswrapper[4840]: I1205 16:07:31.167946 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_7d48a78c-f903-4b87-8c99-622c71bac6aa/ceilometer-notification-agent/0.log" Dec 05 16:07:31 crc kubenswrapper[4840]: I1205 16:07:31.202860 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_7d48a78c-f903-4b87-8c99-622c71bac6aa/ceilometer-central-agent/0.log" Dec 05 16:07:31 crc kubenswrapper[4840]: I1205 16:07:31.315932 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_7d48a78c-f903-4b87-8c99-622c71bac6aa/sg-core/0.log" Dec 05 16:07:31 crc kubenswrapper[4840]: I1205 16:07:31.501072 4840 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ceilometer-0_7d48a78c-f903-4b87-8c99-622c71bac6aa/proxy-httpd/0.log" Dec 05 16:07:31 crc kubenswrapper[4840]: I1205 16:07:31.590457 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_b2379ec8-f983-42df-9255-2a97b8589b6d/cinder-api/0.log" Dec 05 16:07:31 crc kubenswrapper[4840]: I1205 16:07:31.649139 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_b2379ec8-f983-42df-9255-2a97b8589b6d/cinder-api-log/0.log" Dec 05 16:07:31 crc kubenswrapper[4840]: I1205 16:07:31.831740 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_bd99a832-4ebb-49a0-88ae-89f2b247da8a/probe/0.log" Dec 05 16:07:31 crc kubenswrapper[4840]: I1205 16:07:31.853623 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_bd99a832-4ebb-49a0-88ae-89f2b247da8a/cinder-scheduler/0.log" Dec 05 16:07:31 crc kubenswrapper[4840]: I1205 16:07:31.975113 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-jftg5_997f1855-be81-4a43-94c8-2f6001a12c0d/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 16:07:32 crc kubenswrapper[4840]: I1205 16:07:32.103497 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-cxxxx_23c1423f-c01b-4a22-b2de-63e6a8646eed/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 16:07:32 crc kubenswrapper[4840]: I1205 16:07:32.183725 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-8c6f6df99-dkpw9_0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe/init/0.log" Dec 05 16:07:32 crc kubenswrapper[4840]: I1205 16:07:32.405217 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-8c6f6df99-dkpw9_0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe/dnsmasq-dns/0.log" Dec 05 16:07:32 crc kubenswrapper[4840]: I1205 16:07:32.421195 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-8c6f6df99-dkpw9_0594160f-f4bb-40c3-b7e0-9dc2bb2cf1fe/init/0.log" Dec 05 16:07:32 crc kubenswrapper[4840]: I1205 16:07:32.431966 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-tjftf_51bf8de7-4ac8-4478-af9d-7b438f6afb1c/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 16:07:32 crc kubenswrapper[4840]: I1205 16:07:32.600876 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_b8843aac-3856-4c2d-80d4-d3f642065c75/glance-httpd/0.log" Dec 05 16:07:32 crc kubenswrapper[4840]: I1205 16:07:32.604581 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_b8843aac-3856-4c2d-80d4-d3f642065c75/glance-log/0.log" Dec 05 16:07:32 crc kubenswrapper[4840]: I1205 16:07:32.798770 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_6326423c-2a7f-4f3c-b361-de370bd51817/glance-httpd/0.log" Dec 05 16:07:32 crc kubenswrapper[4840]: I1205 16:07:32.819623 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_6326423c-2a7f-4f3c-b361-de370bd51817/glance-log/0.log" Dec 05 16:07:32 crc kubenswrapper[4840]: I1205 16:07:32.960668 4840 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_horizon-7b99464548-lx7k9_d384faa1-3b3b-45f8-bf4b-902236ec40da/horizon/0.log" Dec 05 16:07:33 crc kubenswrapper[4840]: I1205 16:07:33.279319 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-97vkv_d89744d6-5d83-4152-8a17-ab5bddf86ad9/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 16:07:33 crc kubenswrapper[4840]: I1205 16:07:33.429915 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-t788v_30a7c1b0-8c3f-48e7-be82-bc57e708cd5e/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 16:07:33 crc kubenswrapper[4840]: I1205 16:07:33.479338 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7b99464548-lx7k9_d384faa1-3b3b-45f8-bf4b-902236ec40da/horizon-log/0.log" Dec 05 16:07:33 crc kubenswrapper[4840]: I1205 16:07:33.699679 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29415841-6x4dr_8e3d6362-d9b5-44ff-a645-076c0611b8f6/keystone-cron/0.log" Dec 05 16:07:33 crc kubenswrapper[4840]: I1205 16:07:33.740092 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-7b75bf6fbf-wj8dt_c98e01c4-b177-45c9-9f0b-bd02f90fe5d2/keystone-api/0.log" Dec 05 16:07:33 crc kubenswrapper[4840]: I1205 16:07:33.886797 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_b4cc279e-ffd3-45a6-94cb-e787194bb137/kube-state-metrics/0.log" Dec 05 16:07:34 crc kubenswrapper[4840]: I1205 16:07:34.071094 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-kjnxk_453e239f-2acb-42cb-a617-35975fb5437a/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 16:07:34 crc kubenswrapper[4840]: I1205 16:07:34.436024 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6d557dcbfc-24qsg_3f464dff-9cae-4492-9e99-7d0343ecefbe/neutron-api/0.log" Dec 05 16:07:34 crc kubenswrapper[4840]: I1205 16:07:34.451991 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6d557dcbfc-24qsg_3f464dff-9cae-4492-9e99-7d0343ecefbe/neutron-httpd/0.log" Dec 05 16:07:34 crc kubenswrapper[4840]: I1205 16:07:34.753842 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-qt7wp_e9a9f7dd-12db-447d-a9d9-f279b5f72f5b/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 16:07:35 crc kubenswrapper[4840]: I1205 16:07:35.185847 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_4d40873e-c669-464c-8c3d-bf5d60c99e62/nova-api-log/0.log" Dec 05 16:07:35 crc kubenswrapper[4840]: I1205 16:07:35.329111 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_5c921bef-fe53-4e64-bf34-1faa504c8a15/nova-cell0-conductor-conductor/0.log" Dec 05 16:07:35 crc kubenswrapper[4840]: I1205 16:07:35.580474 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_ef118f1d-aaf7-48be-b9aa-ec84d23ea999/nova-cell1-conductor-conductor/0.log" Dec 05 16:07:35 crc kubenswrapper[4840]: I1205 16:07:35.608094 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_4d40873e-c669-464c-8c3d-bf5d60c99e62/nova-api-api/0.log" Dec 05 16:07:35 crc kubenswrapper[4840]: I1205 16:07:35.823055 4840 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-cell1-novncproxy-0_c5bca6a7-9654-492d-9687-e7672c18117f/nova-cell1-novncproxy-novncproxy/0.log" Dec 05 16:07:35 crc kubenswrapper[4840]: I1205 16:07:35.935414 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-4c2f4_c64b6a7d-6e39-40f8-8837-660f386a357e/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 16:07:36 crc kubenswrapper[4840]: I1205 16:07:36.142857 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_0853ab99-c6ae-4ea4-8aa3-119437720120/nova-metadata-log/0.log" Dec 05 16:07:36 crc kubenswrapper[4840]: I1205 16:07:36.359333 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_28fec705-baad-4e89-94d4-e1e7d64579a1/mysql-bootstrap/0.log" Dec 05 16:07:36 crc kubenswrapper[4840]: I1205 16:07:36.472356 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_863cd7d3-d2a4-44eb-88c8-c3cd9259cb78/nova-scheduler-scheduler/0.log" Dec 05 16:07:36 crc kubenswrapper[4840]: I1205 16:07:36.555838 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_28fec705-baad-4e89-94d4-e1e7d64579a1/mysql-bootstrap/0.log" Dec 05 16:07:36 crc kubenswrapper[4840]: I1205 16:07:36.588890 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_28fec705-baad-4e89-94d4-e1e7d64579a1/galera/0.log" Dec 05 16:07:36 crc kubenswrapper[4840]: I1205 16:07:36.766034 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_b1a586cf-ff08-4975-b172-0167bb10ff77/mysql-bootstrap/0.log" Dec 05 16:07:36 crc kubenswrapper[4840]: I1205 16:07:36.950330 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_b1a586cf-ff08-4975-b172-0167bb10ff77/galera/0.log" Dec 05 16:07:37 crc kubenswrapper[4840]: I1205 16:07:37.029400 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_b1a586cf-ff08-4975-b172-0167bb10ff77/mysql-bootstrap/0.log" Dec 05 16:07:37 crc kubenswrapper[4840]: I1205 16:07:37.181688 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_304fce22-b828-4844-9db0-13120847afc1/openstackclient/0.log" Dec 05 16:07:37 crc kubenswrapper[4840]: I1205 16:07:37.272511 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-cttvn_ab69b71e-d666-46a7-a896-96a70fff685a/ovn-controller/0.log" Dec 05 16:07:37 crc kubenswrapper[4840]: I1205 16:07:37.488275 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-kbwmc_3548faf3-ee23-449d-b44c-5858d2cdc9ec/openstack-network-exporter/0.log" Dec 05 16:07:37 crc kubenswrapper[4840]: I1205 16:07:37.525208 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_0853ab99-c6ae-4ea4-8aa3-119437720120/nova-metadata-metadata/0.log" Dec 05 16:07:37 crc kubenswrapper[4840]: I1205 16:07:37.616918 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rnf5z_f75e819c-db65-4ab4-8530-1390b8a83dd0/ovsdb-server-init/0.log" Dec 05 16:07:37 crc kubenswrapper[4840]: I1205 16:07:37.875760 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rnf5z_f75e819c-db65-4ab4-8530-1390b8a83dd0/ovsdb-server-init/0.log" Dec 05 16:07:37 crc kubenswrapper[4840]: I1205 16:07:37.875888 4840 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rnf5z_f75e819c-db65-4ab4-8530-1390b8a83dd0/ovs-vswitchd/0.log" Dec 05 16:07:37 crc kubenswrapper[4840]: I1205 16:07:37.892239 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rnf5z_f75e819c-db65-4ab4-8530-1390b8a83dd0/ovsdb-server/0.log" Dec 05 16:07:38 crc kubenswrapper[4840]: I1205 16:07:38.116416 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_1457a36a-acaa-42e9-b5ea-7667c272d25d/ovn-northd/0.log" Dec 05 16:07:38 crc kubenswrapper[4840]: I1205 16:07:38.148521 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_1457a36a-acaa-42e9-b5ea-7667c272d25d/openstack-network-exporter/0.log" Dec 05 16:07:38 crc kubenswrapper[4840]: I1205 16:07:38.152758 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-sclgc_d0c216f7-ce43-4852-9788-e1f5e5705ec4/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 16:07:38 crc kubenswrapper[4840]: I1205 16:07:38.315757 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_0fdc90f7-3261-4c8f-860b-c5f3890d3470/openstack-network-exporter/0.log" Dec 05 16:07:38 crc kubenswrapper[4840]: I1205 16:07:38.389017 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_0fdc90f7-3261-4c8f-860b-c5f3890d3470/ovsdbserver-nb/0.log" Dec 05 16:07:38 crc kubenswrapper[4840]: I1205 16:07:38.523922 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_7548669d-ea2b-4442-b4b6-f3408d636798/ovsdbserver-sb/0.log" Dec 05 16:07:38 crc kubenswrapper[4840]: I1205 16:07:38.535123 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_7548669d-ea2b-4442-b4b6-f3408d636798/openstack-network-exporter/0.log" Dec 05 16:07:38 crc kubenswrapper[4840]: I1205 16:07:38.744235 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5d878656b-vljqr_dc5ac202-1b33-4a65-aab2-d5fe6e62f844/placement-api/0.log" Dec 05 16:07:38 crc kubenswrapper[4840]: I1205 16:07:38.862978 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c0b4037e-9bd6-4a53-84b3-941d72023ce3/setup-container/0.log" Dec 05 16:07:38 crc kubenswrapper[4840]: I1205 16:07:38.872187 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5d878656b-vljqr_dc5ac202-1b33-4a65-aab2-d5fe6e62f844/placement-log/0.log" Dec 05 16:07:39 crc kubenswrapper[4840]: I1205 16:07:39.009398 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c0b4037e-9bd6-4a53-84b3-941d72023ce3/setup-container/0.log" Dec 05 16:07:39 crc kubenswrapper[4840]: I1205 16:07:39.077360 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_4b53cdac-e8cf-4dc5-abed-0d20e7ca8140/setup-container/0.log" Dec 05 16:07:39 crc kubenswrapper[4840]: I1205 16:07:39.086338 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c0b4037e-9bd6-4a53-84b3-941d72023ce3/rabbitmq/0.log" Dec 05 16:07:39 crc kubenswrapper[4840]: I1205 16:07:39.311014 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_4b53cdac-e8cf-4dc5-abed-0d20e7ca8140/setup-container/0.log" Dec 05 16:07:39 crc kubenswrapper[4840]: I1205 16:07:39.330541 4840 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack_rabbitmq-server-0_4b53cdac-e8cf-4dc5-abed-0d20e7ca8140/rabbitmq/0.log" Dec 05 16:07:39 crc kubenswrapper[4840]: I1205 16:07:39.470733 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-96xww_b0e3c935-42f8-456b-8870-a4ca2f9fce1d/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 16:07:39 crc kubenswrapper[4840]: I1205 16:07:39.621653 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-4m26l_740d3f73-d31d-4a95-9830-ed5545f8525a/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 16:07:39 crc kubenswrapper[4840]: I1205 16:07:39.729169 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-sxlt7_50a15428-e663-42af-a044-01daa7f04c93/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 16:07:39 crc kubenswrapper[4840]: I1205 16:07:39.959453 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-thbrf_54e23b4a-dfba-45da-9197-cfbdcbd4ccfe/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 16:07:40 crc kubenswrapper[4840]: I1205 16:07:40.102090 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-8sfl9_0e16306a-b5d0-468b-b0a2-b19ad5af4592/ssh-known-hosts-edpm-deployment/0.log" Dec 05 16:07:40 crc kubenswrapper[4840]: I1205 16:07:40.241105 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-74d87df5bc-m4vp7_8217ba67-c10d-43b2-8e12-41c6c25aa2da/proxy-server/0.log" Dec 05 16:07:40 crc kubenswrapper[4840]: I1205 16:07:40.300552 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-74d87df5bc-m4vp7_8217ba67-c10d-43b2-8e12-41c6c25aa2da/proxy-httpd/0.log" Dec 05 16:07:40 crc kubenswrapper[4840]: I1205 16:07:40.415758 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-b696t_45ba7f21-a1e8-4443-816f-91c5392f62df/swift-ring-rebalance/0.log" Dec 05 16:07:40 crc kubenswrapper[4840]: I1205 16:07:40.592163 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/account-reaper/0.log" Dec 05 16:07:40 crc kubenswrapper[4840]: I1205 16:07:40.593968 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/account-auditor/0.log" Dec 05 16:07:40 crc kubenswrapper[4840]: I1205 16:07:40.697911 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/account-replicator/0.log" Dec 05 16:07:40 crc kubenswrapper[4840]: I1205 16:07:40.800528 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/account-server/0.log" Dec 05 16:07:40 crc kubenswrapper[4840]: I1205 16:07:40.807173 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/container-auditor/0.log" Dec 05 16:07:40 crc kubenswrapper[4840]: I1205 16:07:40.833771 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/container-replicator/0.log" Dec 05 16:07:40 crc kubenswrapper[4840]: I1205 16:07:40.897064 4840 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/container-server/0.log" Dec 05 16:07:41 crc kubenswrapper[4840]: I1205 16:07:41.045830 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/container-updater/0.log" Dec 05 16:07:41 crc kubenswrapper[4840]: I1205 16:07:41.052526 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/object-expirer/0.log" Dec 05 16:07:41 crc kubenswrapper[4840]: I1205 16:07:41.063660 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/object-auditor/0.log" Dec 05 16:07:41 crc kubenswrapper[4840]: I1205 16:07:41.142979 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/object-replicator/0.log" Dec 05 16:07:41 crc kubenswrapper[4840]: I1205 16:07:41.204594 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/object-server/0.log" Dec 05 16:07:41 crc kubenswrapper[4840]: I1205 16:07:41.311499 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/object-updater/0.log" Dec 05 16:07:41 crc kubenswrapper[4840]: I1205 16:07:41.325934 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/rsync/0.log" Dec 05 16:07:41 crc kubenswrapper[4840]: I1205 16:07:41.356953 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_49fa86fd-482b-426d-9ec6-2c963600851e/swift-recon-cron/0.log" Dec 05 16:07:41 crc kubenswrapper[4840]: I1205 16:07:41.579831 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-76pjd_d51bc3d7-3ce5-4967-ba22-71cef47d25d1/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 16:07:41 crc kubenswrapper[4840]: I1205 16:07:41.605147 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_d2e8a783-170e-44cb-a505-1ee2a96572af/tempest-tests-tempest-tests-runner/0.log" Dec 05 16:07:41 crc kubenswrapper[4840]: I1205 16:07:41.761358 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_f37d61a2-7a86-40a3-9394-0ca13367e28f/test-operator-logs-container/0.log" Dec 05 16:07:41 crc kubenswrapper[4840]: I1205 16:07:41.842008 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-6zs5f_d55f8919-f3d7-4080-9573-b92529c9ec9f/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 16:07:43 crc kubenswrapper[4840]: I1205 16:07:43.520533 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-nnxrj"] Dec 05 16:07:43 crc kubenswrapper[4840]: E1205 16:07:43.521650 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcb6db80-9c7f-47bd-991e-b6ef012ee58f" containerName="container-00" Dec 05 16:07:43 crc kubenswrapper[4840]: I1205 16:07:43.521679 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcb6db80-9c7f-47bd-991e-b6ef012ee58f" containerName="container-00" Dec 05 16:07:43 crc kubenswrapper[4840]: I1205 16:07:43.522027 4840 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="bcb6db80-9c7f-47bd-991e-b6ef012ee58f" containerName="container-00" Dec 05 16:07:43 crc kubenswrapper[4840]: I1205 16:07:43.527996 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nnxrj" Dec 05 16:07:43 crc kubenswrapper[4840]: I1205 16:07:43.564111 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nnxrj"] Dec 05 16:07:43 crc kubenswrapper[4840]: I1205 16:07:43.686115 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd140a3e-3b46-40cc-a885-9c63346f92e9-catalog-content\") pod \"community-operators-nnxrj\" (UID: \"fd140a3e-3b46-40cc-a885-9c63346f92e9\") " pod="openshift-marketplace/community-operators-nnxrj" Dec 05 16:07:43 crc kubenswrapper[4840]: I1205 16:07:43.686191 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd140a3e-3b46-40cc-a885-9c63346f92e9-utilities\") pod \"community-operators-nnxrj\" (UID: \"fd140a3e-3b46-40cc-a885-9c63346f92e9\") " pod="openshift-marketplace/community-operators-nnxrj" Dec 05 16:07:43 crc kubenswrapper[4840]: I1205 16:07:43.686280 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4x4sz\" (UniqueName: \"kubernetes.io/projected/fd140a3e-3b46-40cc-a885-9c63346f92e9-kube-api-access-4x4sz\") pod \"community-operators-nnxrj\" (UID: \"fd140a3e-3b46-40cc-a885-9c63346f92e9\") " pod="openshift-marketplace/community-operators-nnxrj" Dec 05 16:07:43 crc kubenswrapper[4840]: I1205 16:07:43.788003 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd140a3e-3b46-40cc-a885-9c63346f92e9-utilities\") pod \"community-operators-nnxrj\" (UID: \"fd140a3e-3b46-40cc-a885-9c63346f92e9\") " pod="openshift-marketplace/community-operators-nnxrj" Dec 05 16:07:43 crc kubenswrapper[4840]: I1205 16:07:43.788263 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4x4sz\" (UniqueName: \"kubernetes.io/projected/fd140a3e-3b46-40cc-a885-9c63346f92e9-kube-api-access-4x4sz\") pod \"community-operators-nnxrj\" (UID: \"fd140a3e-3b46-40cc-a885-9c63346f92e9\") " pod="openshift-marketplace/community-operators-nnxrj" Dec 05 16:07:43 crc kubenswrapper[4840]: I1205 16:07:43.788396 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd140a3e-3b46-40cc-a885-9c63346f92e9-catalog-content\") pod \"community-operators-nnxrj\" (UID: \"fd140a3e-3b46-40cc-a885-9c63346f92e9\") " pod="openshift-marketplace/community-operators-nnxrj" Dec 05 16:07:43 crc kubenswrapper[4840]: I1205 16:07:43.788533 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd140a3e-3b46-40cc-a885-9c63346f92e9-utilities\") pod \"community-operators-nnxrj\" (UID: \"fd140a3e-3b46-40cc-a885-9c63346f92e9\") " pod="openshift-marketplace/community-operators-nnxrj" Dec 05 16:07:43 crc kubenswrapper[4840]: I1205 16:07:43.788971 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd140a3e-3b46-40cc-a885-9c63346f92e9-catalog-content\") pod \"community-operators-nnxrj\" (UID: 
\"fd140a3e-3b46-40cc-a885-9c63346f92e9\") " pod="openshift-marketplace/community-operators-nnxrj" Dec 05 16:07:44 crc kubenswrapper[4840]: I1205 16:07:44.066109 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4x4sz\" (UniqueName: \"kubernetes.io/projected/fd140a3e-3b46-40cc-a885-9c63346f92e9-kube-api-access-4x4sz\") pod \"community-operators-nnxrj\" (UID: \"fd140a3e-3b46-40cc-a885-9c63346f92e9\") " pod="openshift-marketplace/community-operators-nnxrj" Dec 05 16:07:44 crc kubenswrapper[4840]: I1205 16:07:44.154337 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nnxrj" Dec 05 16:07:45 crc kubenswrapper[4840]: I1205 16:07:45.761789 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nnxrj"] Dec 05 16:07:46 crc kubenswrapper[4840]: I1205 16:07:46.364982 4840 generic.go:334] "Generic (PLEG): container finished" podID="fd140a3e-3b46-40cc-a885-9c63346f92e9" containerID="32a654df8367e6c0d5742d1d0e2685097165c7708615a5d32bca507a226893eb" exitCode=0 Dec 05 16:07:46 crc kubenswrapper[4840]: I1205 16:07:46.365289 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nnxrj" event={"ID":"fd140a3e-3b46-40cc-a885-9c63346f92e9","Type":"ContainerDied","Data":"32a654df8367e6c0d5742d1d0e2685097165c7708615a5d32bca507a226893eb"} Dec 05 16:07:46 crc kubenswrapper[4840]: I1205 16:07:46.365330 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nnxrj" event={"ID":"fd140a3e-3b46-40cc-a885-9c63346f92e9","Type":"ContainerStarted","Data":"a04e6c8c378c90ce61cb172c3d8375e033a3661300508089b7da8a6d3e9aa1c5"} Dec 05 16:07:47 crc kubenswrapper[4840]: I1205 16:07:47.382902 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nnxrj" event={"ID":"fd140a3e-3b46-40cc-a885-9c63346f92e9","Type":"ContainerStarted","Data":"00cb2e148fce257092520a6ae2ded34ef4cab5b3b859851e4e2701c63a61d413"} Dec 05 16:07:48 crc kubenswrapper[4840]: I1205 16:07:48.392603 4840 generic.go:334] "Generic (PLEG): container finished" podID="fd140a3e-3b46-40cc-a885-9c63346f92e9" containerID="00cb2e148fce257092520a6ae2ded34ef4cab5b3b859851e4e2701c63a61d413" exitCode=0 Dec 05 16:07:48 crc kubenswrapper[4840]: I1205 16:07:48.392645 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nnxrj" event={"ID":"fd140a3e-3b46-40cc-a885-9c63346f92e9","Type":"ContainerDied","Data":"00cb2e148fce257092520a6ae2ded34ef4cab5b3b859851e4e2701c63a61d413"} Dec 05 16:07:49 crc kubenswrapper[4840]: I1205 16:07:49.402827 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nnxrj" event={"ID":"fd140a3e-3b46-40cc-a885-9c63346f92e9","Type":"ContainerStarted","Data":"f96c9d17837bdf84d2d1ecd8121ef8b752f94bd0ccd4eddfef3a51b4a506f8f4"} Dec 05 16:07:49 crc kubenswrapper[4840]: I1205 16:07:49.431816 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-nnxrj" podStartSLOduration=4.016345617 podStartE2EDuration="6.431793148s" podCreationTimestamp="2025-12-05 16:07:43 +0000 UTC" firstStartedPulling="2025-12-05 16:07:46.366858701 +0000 UTC m=+4144.707921315" lastFinishedPulling="2025-12-05 16:07:48.782306232 +0000 UTC m=+4147.123368846" observedRunningTime="2025-12-05 16:07:49.430078839 +0000 UTC m=+4147.771141453" 
watchObservedRunningTime="2025-12-05 16:07:49.431793148 +0000 UTC m=+4147.772855762" Dec 05 16:07:49 crc kubenswrapper[4840]: I1205 16:07:49.471757 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 16:07:49 crc kubenswrapper[4840]: I1205 16:07:49.471825 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 16:07:49 crc kubenswrapper[4840]: I1205 16:07:49.471933 4840 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" Dec 05 16:07:49 crc kubenswrapper[4840]: I1205 16:07:49.472745 4840 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"24301b3a21dcacee3bf216aee4db41bf2d104b03938ab88fe2b2936fc478b964"} pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 16:07:49 crc kubenswrapper[4840]: I1205 16:07:49.472820 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" containerID="cri-o://24301b3a21dcacee3bf216aee4db41bf2d104b03938ab88fe2b2936fc478b964" gracePeriod=600 Dec 05 16:07:50 crc kubenswrapper[4840]: I1205 16:07:50.418487 4840 generic.go:334] "Generic (PLEG): container finished" podID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerID="24301b3a21dcacee3bf216aee4db41bf2d104b03938ab88fe2b2936fc478b964" exitCode=0 Dec 05 16:07:50 crc kubenswrapper[4840]: I1205 16:07:50.418542 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerDied","Data":"24301b3a21dcacee3bf216aee4db41bf2d104b03938ab88fe2b2936fc478b964"} Dec 05 16:07:50 crc kubenswrapper[4840]: I1205 16:07:50.419038 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerStarted","Data":"51a59f3f90aefa5d4a8e87aadf3bb93ef161c12368cbe20bc6ad4322dcd88e18"} Dec 05 16:07:50 crc kubenswrapper[4840]: I1205 16:07:50.419071 4840 scope.go:117] "RemoveContainer" containerID="4f21ba840b0971a4f841843c0d5ecf8ed5ff59cac69ddf6c50dc9a92f07cdc96" Dec 05 16:07:53 crc kubenswrapper[4840]: I1205 16:07:53.229569 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_bd532161-0cce-4e82-b084-45e1569c1575/memcached/0.log" Dec 05 16:07:54 crc kubenswrapper[4840]: I1205 16:07:54.156032 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-nnxrj" Dec 05 16:07:54 crc kubenswrapper[4840]: I1205 16:07:54.156346 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/community-operators-nnxrj" Dec 05 16:07:54 crc kubenswrapper[4840]: I1205 16:07:54.361640 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-nnxrj" Dec 05 16:07:54 crc kubenswrapper[4840]: I1205 16:07:54.527888 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-nnxrj" Dec 05 16:07:54 crc kubenswrapper[4840]: I1205 16:07:54.606375 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nnxrj"] Dec 05 16:07:56 crc kubenswrapper[4840]: I1205 16:07:56.487531 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-nnxrj" podUID="fd140a3e-3b46-40cc-a885-9c63346f92e9" containerName="registry-server" containerID="cri-o://f96c9d17837bdf84d2d1ecd8121ef8b752f94bd0ccd4eddfef3a51b4a506f8f4" gracePeriod=2 Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.038921 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nnxrj" Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.229697 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4x4sz\" (UniqueName: \"kubernetes.io/projected/fd140a3e-3b46-40cc-a885-9c63346f92e9-kube-api-access-4x4sz\") pod \"fd140a3e-3b46-40cc-a885-9c63346f92e9\" (UID: \"fd140a3e-3b46-40cc-a885-9c63346f92e9\") " Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.229848 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd140a3e-3b46-40cc-a885-9c63346f92e9-utilities\") pod \"fd140a3e-3b46-40cc-a885-9c63346f92e9\" (UID: \"fd140a3e-3b46-40cc-a885-9c63346f92e9\") " Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.229900 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd140a3e-3b46-40cc-a885-9c63346f92e9-catalog-content\") pod \"fd140a3e-3b46-40cc-a885-9c63346f92e9\" (UID: \"fd140a3e-3b46-40cc-a885-9c63346f92e9\") " Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.230963 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd140a3e-3b46-40cc-a885-9c63346f92e9-utilities" (OuterVolumeSpecName: "utilities") pod "fd140a3e-3b46-40cc-a885-9c63346f92e9" (UID: "fd140a3e-3b46-40cc-a885-9c63346f92e9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.246293 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd140a3e-3b46-40cc-a885-9c63346f92e9-kube-api-access-4x4sz" (OuterVolumeSpecName: "kube-api-access-4x4sz") pod "fd140a3e-3b46-40cc-a885-9c63346f92e9" (UID: "fd140a3e-3b46-40cc-a885-9c63346f92e9"). InnerVolumeSpecName "kube-api-access-4x4sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.299312 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd140a3e-3b46-40cc-a885-9c63346f92e9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fd140a3e-3b46-40cc-a885-9c63346f92e9" (UID: "fd140a3e-3b46-40cc-a885-9c63346f92e9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.339142 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd140a3e-3b46-40cc-a885-9c63346f92e9-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.339171 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd140a3e-3b46-40cc-a885-9c63346f92e9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.339184 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4x4sz\" (UniqueName: \"kubernetes.io/projected/fd140a3e-3b46-40cc-a885-9c63346f92e9-kube-api-access-4x4sz\") on node \"crc\" DevicePath \"\"" Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.498752 4840 generic.go:334] "Generic (PLEG): container finished" podID="fd140a3e-3b46-40cc-a885-9c63346f92e9" containerID="f96c9d17837bdf84d2d1ecd8121ef8b752f94bd0ccd4eddfef3a51b4a506f8f4" exitCode=0 Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.498798 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nnxrj" event={"ID":"fd140a3e-3b46-40cc-a885-9c63346f92e9","Type":"ContainerDied","Data":"f96c9d17837bdf84d2d1ecd8121ef8b752f94bd0ccd4eddfef3a51b4a506f8f4"} Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.498827 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nnxrj" event={"ID":"fd140a3e-3b46-40cc-a885-9c63346f92e9","Type":"ContainerDied","Data":"a04e6c8c378c90ce61cb172c3d8375e033a3661300508089b7da8a6d3e9aa1c5"} Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.498844 4840 scope.go:117] "RemoveContainer" containerID="f96c9d17837bdf84d2d1ecd8121ef8b752f94bd0ccd4eddfef3a51b4a506f8f4" Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.499027 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-nnxrj" Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.523464 4840 scope.go:117] "RemoveContainer" containerID="00cb2e148fce257092520a6ae2ded34ef4cab5b3b859851e4e2701c63a61d413" Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.531542 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nnxrj"] Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.544479 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-nnxrj"] Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.550417 4840 scope.go:117] "RemoveContainer" containerID="32a654df8367e6c0d5742d1d0e2685097165c7708615a5d32bca507a226893eb" Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.597146 4840 scope.go:117] "RemoveContainer" containerID="f96c9d17837bdf84d2d1ecd8121ef8b752f94bd0ccd4eddfef3a51b4a506f8f4" Dec 05 16:07:57 crc kubenswrapper[4840]: E1205 16:07:57.597591 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f96c9d17837bdf84d2d1ecd8121ef8b752f94bd0ccd4eddfef3a51b4a506f8f4\": container with ID starting with f96c9d17837bdf84d2d1ecd8121ef8b752f94bd0ccd4eddfef3a51b4a506f8f4 not found: ID does not exist" containerID="f96c9d17837bdf84d2d1ecd8121ef8b752f94bd0ccd4eddfef3a51b4a506f8f4" Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.597635 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f96c9d17837bdf84d2d1ecd8121ef8b752f94bd0ccd4eddfef3a51b4a506f8f4"} err="failed to get container status \"f96c9d17837bdf84d2d1ecd8121ef8b752f94bd0ccd4eddfef3a51b4a506f8f4\": rpc error: code = NotFound desc = could not find container \"f96c9d17837bdf84d2d1ecd8121ef8b752f94bd0ccd4eddfef3a51b4a506f8f4\": container with ID starting with f96c9d17837bdf84d2d1ecd8121ef8b752f94bd0ccd4eddfef3a51b4a506f8f4 not found: ID does not exist" Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.597660 4840 scope.go:117] "RemoveContainer" containerID="00cb2e148fce257092520a6ae2ded34ef4cab5b3b859851e4e2701c63a61d413" Dec 05 16:07:57 crc kubenswrapper[4840]: E1205 16:07:57.598008 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00cb2e148fce257092520a6ae2ded34ef4cab5b3b859851e4e2701c63a61d413\": container with ID starting with 00cb2e148fce257092520a6ae2ded34ef4cab5b3b859851e4e2701c63a61d413 not found: ID does not exist" containerID="00cb2e148fce257092520a6ae2ded34ef4cab5b3b859851e4e2701c63a61d413" Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.598036 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00cb2e148fce257092520a6ae2ded34ef4cab5b3b859851e4e2701c63a61d413"} err="failed to get container status \"00cb2e148fce257092520a6ae2ded34ef4cab5b3b859851e4e2701c63a61d413\": rpc error: code = NotFound desc = could not find container \"00cb2e148fce257092520a6ae2ded34ef4cab5b3b859851e4e2701c63a61d413\": container with ID starting with 00cb2e148fce257092520a6ae2ded34ef4cab5b3b859851e4e2701c63a61d413 not found: ID does not exist" Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.598051 4840 scope.go:117] "RemoveContainer" containerID="32a654df8367e6c0d5742d1d0e2685097165c7708615a5d32bca507a226893eb" Dec 05 16:07:57 crc kubenswrapper[4840]: E1205 16:07:57.598347 4840 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"32a654df8367e6c0d5742d1d0e2685097165c7708615a5d32bca507a226893eb\": container with ID starting with 32a654df8367e6c0d5742d1d0e2685097165c7708615a5d32bca507a226893eb not found: ID does not exist" containerID="32a654df8367e6c0d5742d1d0e2685097165c7708615a5d32bca507a226893eb" Dec 05 16:07:57 crc kubenswrapper[4840]: I1205 16:07:57.598407 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32a654df8367e6c0d5742d1d0e2685097165c7708615a5d32bca507a226893eb"} err="failed to get container status \"32a654df8367e6c0d5742d1d0e2685097165c7708615a5d32bca507a226893eb\": rpc error: code = NotFound desc = could not find container \"32a654df8367e6c0d5742d1d0e2685097165c7708615a5d32bca507a226893eb\": container with ID starting with 32a654df8367e6c0d5742d1d0e2685097165c7708615a5d32bca507a226893eb not found: ID does not exist" Dec 05 16:07:58 crc kubenswrapper[4840]: I1205 16:07:58.076808 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd140a3e-3b46-40cc-a885-9c63346f92e9" path="/var/lib/kubelet/pods/fd140a3e-3b46-40cc-a885-9c63346f92e9/volumes" Dec 05 16:08:10 crc kubenswrapper[4840]: I1205 16:08:10.122073 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf_7a20aa55-efd0-473a-9582-e7812dd599ab/util/0.log" Dec 05 16:08:10 crc kubenswrapper[4840]: I1205 16:08:10.262167 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf_7a20aa55-efd0-473a-9582-e7812dd599ab/util/0.log" Dec 05 16:08:10 crc kubenswrapper[4840]: I1205 16:08:10.267616 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf_7a20aa55-efd0-473a-9582-e7812dd599ab/pull/0.log" Dec 05 16:08:10 crc kubenswrapper[4840]: I1205 16:08:10.283131 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf_7a20aa55-efd0-473a-9582-e7812dd599ab/pull/0.log" Dec 05 16:08:10 crc kubenswrapper[4840]: I1205 16:08:10.443467 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf_7a20aa55-efd0-473a-9582-e7812dd599ab/pull/0.log" Dec 05 16:08:10 crc kubenswrapper[4840]: I1205 16:08:10.451226 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf_7a20aa55-efd0-473a-9582-e7812dd599ab/util/0.log" Dec 05 16:08:10 crc kubenswrapper[4840]: I1205 16:08:10.475619 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8229f1413af36b4be655cc8a7db1890c9f79cd1628ac17d2a736d37882q8crf_7a20aa55-efd0-473a-9582-e7812dd599ab/extract/0.log" Dec 05 16:08:10 crc kubenswrapper[4840]: I1205 16:08:10.671237 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-7zmt7_c79f46e0-5947-4b4a-b581-0e49736fb41f/kube-rbac-proxy/0.log" Dec 05 16:08:10 crc kubenswrapper[4840]: I1205 16:08:10.722708 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-7zmt7_c79f46e0-5947-4b4a-b581-0e49736fb41f/manager/0.log" Dec 05 16:08:10 crc 
kubenswrapper[4840]: I1205 16:08:10.752615 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-dcbzq_94defa3c-b83c-44b9-83c0-e92bdf7944be/kube-rbac-proxy/0.log" Dec 05 16:08:10 crc kubenswrapper[4840]: I1205 16:08:10.918135 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-dcbzq_94defa3c-b83c-44b9-83c0-e92bdf7944be/manager/0.log" Dec 05 16:08:10 crc kubenswrapper[4840]: I1205 16:08:10.936257 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-8vw7g_9f897b64-3aeb-44c6-a340-9e0082876e93/kube-rbac-proxy/0.log" Dec 05 16:08:10 crc kubenswrapper[4840]: I1205 16:08:10.992127 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-8vw7g_9f897b64-3aeb-44c6-a340-9e0082876e93/manager/0.log" Dec 05 16:08:11 crc kubenswrapper[4840]: I1205 16:08:11.105271 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-fwvd2_c15ac393-953d-45e9-b8dc-7212c6e2366b/kube-rbac-proxy/0.log" Dec 05 16:08:11 crc kubenswrapper[4840]: I1205 16:08:11.212759 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-fwvd2_c15ac393-953d-45e9-b8dc-7212c6e2366b/manager/0.log" Dec 05 16:08:11 crc kubenswrapper[4840]: I1205 16:08:11.301113 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-qz4dx_546f9401-ad92-49f1-836a-8e240bbc2d61/kube-rbac-proxy/0.log" Dec 05 16:08:11 crc kubenswrapper[4840]: I1205 16:08:11.328137 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-qz4dx_546f9401-ad92-49f1-836a-8e240bbc2d61/manager/0.log" Dec 05 16:08:11 crc kubenswrapper[4840]: I1205 16:08:11.407057 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-fmbzp_ff2da961-d2c6-486f-87bf-2394ee00a5a1/kube-rbac-proxy/0.log" Dec 05 16:08:11 crc kubenswrapper[4840]: I1205 16:08:11.507841 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-fmbzp_ff2da961-d2c6-486f-87bf-2394ee00a5a1/manager/0.log" Dec 05 16:08:11 crc kubenswrapper[4840]: I1205 16:08:11.565097 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-gpcmz_8e7074a0-bae6-49e7-8915-c4cb3242108d/kube-rbac-proxy/0.log" Dec 05 16:08:11 crc kubenswrapper[4840]: I1205 16:08:11.722958 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-q2pp7_48a2075e-30bb-41fb-a311-fb9b593182c6/kube-rbac-proxy/0.log" Dec 05 16:08:11 crc kubenswrapper[4840]: I1205 16:08:11.749061 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-gpcmz_8e7074a0-bae6-49e7-8915-c4cb3242108d/manager/0.log" Dec 05 16:08:11 crc kubenswrapper[4840]: I1205 16:08:11.803520 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-q2pp7_48a2075e-30bb-41fb-a311-fb9b593182c6/manager/0.log" Dec 
05 16:08:11 crc kubenswrapper[4840]: I1205 16:08:11.966758 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-zjbwl_6edb6d08-9885-457f-8642-ef77c64de97a/kube-rbac-proxy/0.log" Dec 05 16:08:11 crc kubenswrapper[4840]: I1205 16:08:11.978675 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-zjbwl_6edb6d08-9885-457f-8642-ef77c64de97a/manager/0.log" Dec 05 16:08:12 crc kubenswrapper[4840]: I1205 16:08:12.121804 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-n6chf_683c5938-459a-4c60-bb98-8237f6ddc4f6/kube-rbac-proxy/0.log" Dec 05 16:08:12 crc kubenswrapper[4840]: I1205 16:08:12.168569 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-n6chf_683c5938-459a-4c60-bb98-8237f6ddc4f6/manager/0.log" Dec 05 16:08:12 crc kubenswrapper[4840]: I1205 16:08:12.252646 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-hvc5g_3b263984-5a9b-45eb-886b-b8209ada6a7a/kube-rbac-proxy/0.log" Dec 05 16:08:12 crc kubenswrapper[4840]: I1205 16:08:12.342789 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-hvc5g_3b263984-5a9b-45eb-886b-b8209ada6a7a/manager/0.log" Dec 05 16:08:12 crc kubenswrapper[4840]: I1205 16:08:12.391485 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-5v888_cdf9b744-368d-4c91-8ecf-6a5d983f3eb7/kube-rbac-proxy/0.log" Dec 05 16:08:12 crc kubenswrapper[4840]: I1205 16:08:12.455431 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-5v888_cdf9b744-368d-4c91-8ecf-6a5d983f3eb7/manager/0.log" Dec 05 16:08:12 crc kubenswrapper[4840]: I1205 16:08:12.514325 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-spqrr_ae41b596-75b2-46ab-b95a-ef7b41f1e66b/kube-rbac-proxy/0.log" Dec 05 16:08:12 crc kubenswrapper[4840]: I1205 16:08:12.700467 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-spqrr_ae41b596-75b2-46ab-b95a-ef7b41f1e66b/manager/0.log" Dec 05 16:08:13 crc kubenswrapper[4840]: I1205 16:08:13.338290 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-xgflv_6c27d60c-a8e1-4616-88b2-391876d4112d/kube-rbac-proxy/0.log" Dec 05 16:08:13 crc kubenswrapper[4840]: I1205 16:08:13.346149 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-xgflv_6c27d60c-a8e1-4616-88b2-391876d4112d/manager/0.log" Dec 05 16:08:13 crc kubenswrapper[4840]: I1205 16:08:13.512797 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt_a18cf365-d0a8-4750-b11e-12d608ceb0e9/kube-rbac-proxy/0.log" Dec 05 16:08:13 crc kubenswrapper[4840]: I1205 16:08:13.568445 4840 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4jjvdt_a18cf365-d0a8-4750-b11e-12d608ceb0e9/manager/0.log" Dec 05 16:08:13 crc kubenswrapper[4840]: I1205 16:08:13.878825 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-mvhfv_e3672e49-5b56-4666-8f46-f7846e65b4ba/registry-server/0.log" Dec 05 16:08:13 crc kubenswrapper[4840]: I1205 16:08:13.975540 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7f5df65589-88fmq_6251ea61-296a-4fe0-b2a2-c6de82a74d33/operator/0.log" Dec 05 16:08:14 crc kubenswrapper[4840]: I1205 16:08:14.048462 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-q5ksl_387ba01a-b726-4c0c-b3ab-160be43d9587/kube-rbac-proxy/0.log" Dec 05 16:08:14 crc kubenswrapper[4840]: I1205 16:08:14.138309 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-q5ksl_387ba01a-b726-4c0c-b3ab-160be43d9587/manager/0.log" Dec 05 16:08:14 crc kubenswrapper[4840]: I1205 16:08:14.299790 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-8jvnm_31eda6ed-1dee-4670-a6d3-22871423db53/kube-rbac-proxy/0.log" Dec 05 16:08:14 crc kubenswrapper[4840]: I1205 16:08:14.313035 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-8jvnm_31eda6ed-1dee-4670-a6d3-22871423db53/manager/0.log" Dec 05 16:08:14 crc kubenswrapper[4840]: I1205 16:08:14.497455 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-r2br9_e1dca245-f390-4f32-8683-eea98ad3fb45/operator/0.log" Dec 05 16:08:14 crc kubenswrapper[4840]: I1205 16:08:14.788644 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-79cb7dcf7d-pw6j8_187efc3a-77ce-4898-89d9-5785491d5d29/manager/0.log" Dec 05 16:08:14 crc kubenswrapper[4840]: I1205 16:08:14.928722 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-fvrzr_919c5760-f9dd-4b40-9b91-ea3b11d13a26/kube-rbac-proxy/0.log" Dec 05 16:08:15 crc kubenswrapper[4840]: I1205 16:08:15.006466 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-fvrzr_919c5760-f9dd-4b40-9b91-ea3b11d13a26/manager/0.log" Dec 05 16:08:15 crc kubenswrapper[4840]: I1205 16:08:15.023470 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-spvpr_f3d60ad8-fd18-4cf1-9ac2-05ce61d52f08/kube-rbac-proxy/0.log" Dec 05 16:08:15 crc kubenswrapper[4840]: I1205 16:08:15.178066 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-spvpr_f3d60ad8-fd18-4cf1-9ac2-05ce61d52f08/manager/0.log" Dec 05 16:08:15 crc kubenswrapper[4840]: I1205 16:08:15.182989 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-g5t4m_df20a713-1c9f-4738-8401-ddff0dcf0c38/kube-rbac-proxy/0.log" Dec 05 16:08:15 crc kubenswrapper[4840]: I1205 16:08:15.185965 4840 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-g5t4m_df20a713-1c9f-4738-8401-ddff0dcf0c38/manager/0.log" Dec 05 16:08:15 crc kubenswrapper[4840]: I1205 16:08:15.304569 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-chg27_53e9bbdb-0559-4053-b38c-395876f9d69f/kube-rbac-proxy/0.log" Dec 05 16:08:15 crc kubenswrapper[4840]: I1205 16:08:15.364535 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-chg27_53e9bbdb-0559-4053-b38c-395876f9d69f/manager/0.log" Dec 05 16:08:34 crc kubenswrapper[4840]: I1205 16:08:34.471930 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-x52gm_e281c913-e265-4ce8-af6a-11f255f6faf1/control-plane-machine-set-operator/0.log" Dec 05 16:08:34 crc kubenswrapper[4840]: I1205 16:08:34.631726 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9lj8m_891a06ac-8101-4fab-a947-2adf9d8eeb7f/machine-api-operator/0.log" Dec 05 16:08:34 crc kubenswrapper[4840]: I1205 16:08:34.634660 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-9lj8m_891a06ac-8101-4fab-a947-2adf9d8eeb7f/kube-rbac-proxy/0.log" Dec 05 16:08:47 crc kubenswrapper[4840]: I1205 16:08:47.442327 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-t8hxr_d94bca81-5539-4bc2-bbec-38a88770929d/cert-manager-controller/0.log" Dec 05 16:08:47 crc kubenswrapper[4840]: I1205 16:08:47.596766 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-n8sd9_7d96fedc-8d6a-4b34-af3e-58104249edc2/cert-manager-cainjector/0.log" Dec 05 16:08:47 crc kubenswrapper[4840]: I1205 16:08:47.687682 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-8nl6w_839aa773-117e-423c-853e-37f05ff674a1/cert-manager-webhook/0.log" Dec 05 16:09:00 crc kubenswrapper[4840]: I1205 16:09:00.513628 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-nhx2t_b15f2765-949a-4e53-a48b-1e691b8b1b37/nmstate-console-plugin/0.log" Dec 05 16:09:00 crc kubenswrapper[4840]: I1205 16:09:00.731627 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-pcnxx_8b83469a-e169-45bb-b5b7-ef32a36719f2/nmstate-handler/0.log" Dec 05 16:09:00 crc kubenswrapper[4840]: I1205 16:09:00.796294 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-rldh8_1c6815a9-6a06-4ed1-81fb-fe876b3ff5db/kube-rbac-proxy/0.log" Dec 05 16:09:00 crc kubenswrapper[4840]: I1205 16:09:00.796491 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-rldh8_1c6815a9-6a06-4ed1-81fb-fe876b3ff5db/nmstate-metrics/0.log" Dec 05 16:09:00 crc kubenswrapper[4840]: I1205 16:09:00.993952 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-8zwbs_26f97dff-d2b1-4d3d-b68a-2a8851ea6999/nmstate-operator/0.log" Dec 05 16:09:01 crc kubenswrapper[4840]: I1205 16:09:01.030355 4840 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-2htrv_8ed0ecac-e512-440f-87ad-14e23ea9945f/nmstate-webhook/0.log" Dec 05 16:09:15 crc kubenswrapper[4840]: I1205 16:09:15.342759 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-bbxvm_233d6836-c070-4d58-8f3d-6145a065240d/kube-rbac-proxy/0.log" Dec 05 16:09:15 crc kubenswrapper[4840]: I1205 16:09:15.511937 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-bbxvm_233d6836-c070-4d58-8f3d-6145a065240d/controller/0.log" Dec 05 16:09:16 crc kubenswrapper[4840]: I1205 16:09:16.222112 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/cp-frr-files/0.log" Dec 05 16:09:16 crc kubenswrapper[4840]: I1205 16:09:16.382450 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/cp-metrics/0.log" Dec 05 16:09:16 crc kubenswrapper[4840]: I1205 16:09:16.413339 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/cp-frr-files/0.log" Dec 05 16:09:16 crc kubenswrapper[4840]: I1205 16:09:16.418445 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/cp-reloader/0.log" Dec 05 16:09:16 crc kubenswrapper[4840]: I1205 16:09:16.422721 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/cp-reloader/0.log" Dec 05 16:09:16 crc kubenswrapper[4840]: I1205 16:09:16.554780 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/cp-frr-files/0.log" Dec 05 16:09:16 crc kubenswrapper[4840]: I1205 16:09:16.608345 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/cp-reloader/0.log" Dec 05 16:09:16 crc kubenswrapper[4840]: I1205 16:09:16.609153 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/cp-metrics/0.log" Dec 05 16:09:16 crc kubenswrapper[4840]: I1205 16:09:16.624110 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/cp-metrics/0.log" Dec 05 16:09:16 crc kubenswrapper[4840]: I1205 16:09:16.794739 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/cp-reloader/0.log" Dec 05 16:09:16 crc kubenswrapper[4840]: I1205 16:09:16.800425 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/controller/0.log" Dec 05 16:09:16 crc kubenswrapper[4840]: I1205 16:09:16.811851 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/cp-metrics/0.log" Dec 05 16:09:16 crc kubenswrapper[4840]: I1205 16:09:16.830374 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/cp-frr-files/0.log" Dec 05 16:09:16 crc kubenswrapper[4840]: I1205 16:09:16.967534 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/frr-metrics/0.log" Dec 
05 16:09:16 crc kubenswrapper[4840]: I1205 16:09:16.968575 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/kube-rbac-proxy/0.log" Dec 05 16:09:17 crc kubenswrapper[4840]: I1205 16:09:17.030431 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/kube-rbac-proxy-frr/0.log" Dec 05 16:09:17 crc kubenswrapper[4840]: I1205 16:09:17.227921 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/reloader/0.log" Dec 05 16:09:17 crc kubenswrapper[4840]: I1205 16:09:17.255540 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-xhqsd_146837b6-fb7d-4368-9ae3-bc4106ff72de/frr-k8s-webhook-server/0.log" Dec 05 16:09:18 crc kubenswrapper[4840]: I1205 16:09:18.096738 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-6c5f98fb9d-c7t9b_5debe082-c97c-4bb6-8eb6-475c0b97e485/manager/0.log" Dec 05 16:09:18 crc kubenswrapper[4840]: I1205 16:09:18.292480 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-6vbng_4c47f541-43bf-400b-bb9f-e3239ad0c636/frr/0.log" Dec 05 16:09:18 crc kubenswrapper[4840]: I1205 16:09:18.296546 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-f894b867-vrfmt_b51981bb-b0ed-4c6a-b82d-ae3155eefcb5/webhook-server/0.log" Dec 05 16:09:18 crc kubenswrapper[4840]: I1205 16:09:18.370987 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-8v7db_475d466e-01af-4a8b-85c9-585a152bc376/kube-rbac-proxy/0.log" Dec 05 16:09:18 crc kubenswrapper[4840]: I1205 16:09:18.776747 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-8v7db_475d466e-01af-4a8b-85c9-585a152bc376/speaker/0.log" Dec 05 16:09:30 crc kubenswrapper[4840]: I1205 16:09:30.058733 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd_131b3e58-67f5-4c2a-8d70-ff674420a7a5/util/0.log" Dec 05 16:09:30 crc kubenswrapper[4840]: I1205 16:09:30.220675 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd_131b3e58-67f5-4c2a-8d70-ff674420a7a5/util/0.log" Dec 05 16:09:30 crc kubenswrapper[4840]: I1205 16:09:30.260364 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd_131b3e58-67f5-4c2a-8d70-ff674420a7a5/pull/0.log" Dec 05 16:09:30 crc kubenswrapper[4840]: I1205 16:09:30.289113 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd_131b3e58-67f5-4c2a-8d70-ff674420a7a5/pull/0.log" Dec 05 16:09:30 crc kubenswrapper[4840]: I1205 16:09:30.417313 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd_131b3e58-67f5-4c2a-8d70-ff674420a7a5/util/0.log" Dec 05 16:09:30 crc kubenswrapper[4840]: I1205 16:09:30.452918 4840 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd_131b3e58-67f5-4c2a-8d70-ff674420a7a5/pull/0.log" Dec 05 16:09:30 crc kubenswrapper[4840]: I1205 16:09:30.484143 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fbxmrd_131b3e58-67f5-4c2a-8d70-ff674420a7a5/extract/0.log" Dec 05 16:09:30 crc kubenswrapper[4840]: I1205 16:09:30.597618 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt_276a5275-a612-4fa5-9aac-c252dc7cad0a/util/0.log" Dec 05 16:09:30 crc kubenswrapper[4840]: I1205 16:09:30.768194 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt_276a5275-a612-4fa5-9aac-c252dc7cad0a/util/0.log" Dec 05 16:09:30 crc kubenswrapper[4840]: I1205 16:09:30.799545 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt_276a5275-a612-4fa5-9aac-c252dc7cad0a/pull/0.log" Dec 05 16:09:30 crc kubenswrapper[4840]: I1205 16:09:30.844661 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt_276a5275-a612-4fa5-9aac-c252dc7cad0a/pull/0.log" Dec 05 16:09:30 crc kubenswrapper[4840]: I1205 16:09:30.968324 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt_276a5275-a612-4fa5-9aac-c252dc7cad0a/pull/0.log" Dec 05 16:09:30 crc kubenswrapper[4840]: I1205 16:09:30.994943 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt_276a5275-a612-4fa5-9aac-c252dc7cad0a/extract/0.log" Dec 05 16:09:31 crc kubenswrapper[4840]: I1205 16:09:31.015685 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gv6vt_276a5275-a612-4fa5-9aac-c252dc7cad0a/util/0.log" Dec 05 16:09:31 crc kubenswrapper[4840]: I1205 16:09:31.158807 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7mwf7_d8153f52-1255-4593-8136-083c0a618d49/extract-utilities/0.log" Dec 05 16:09:31 crc kubenswrapper[4840]: I1205 16:09:31.298742 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7mwf7_d8153f52-1255-4593-8136-083c0a618d49/extract-content/0.log" Dec 05 16:09:31 crc kubenswrapper[4840]: I1205 16:09:31.310781 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7mwf7_d8153f52-1255-4593-8136-083c0a618d49/extract-utilities/0.log" Dec 05 16:09:31 crc kubenswrapper[4840]: I1205 16:09:31.321903 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7mwf7_d8153f52-1255-4593-8136-083c0a618d49/extract-content/0.log" Dec 05 16:09:31 crc kubenswrapper[4840]: I1205 16:09:31.512673 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7mwf7_d8153f52-1255-4593-8136-083c0a618d49/extract-utilities/0.log" Dec 05 16:09:31 crc kubenswrapper[4840]: I1205 16:09:31.606666 4840 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-7mwf7_d8153f52-1255-4593-8136-083c0a618d49/extract-content/0.log" Dec 05 16:09:31 crc kubenswrapper[4840]: I1205 16:09:31.701256 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nbj9s_916556b2-0a55-4c5a-8d5a-3e42bdeaec95/extract-utilities/0.log" Dec 05 16:09:31 crc kubenswrapper[4840]: I1205 16:09:31.927127 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nbj9s_916556b2-0a55-4c5a-8d5a-3e42bdeaec95/extract-content/0.log" Dec 05 16:09:31 crc kubenswrapper[4840]: I1205 16:09:31.940832 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nbj9s_916556b2-0a55-4c5a-8d5a-3e42bdeaec95/extract-content/0.log" Dec 05 16:09:31 crc kubenswrapper[4840]: I1205 16:09:31.994307 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nbj9s_916556b2-0a55-4c5a-8d5a-3e42bdeaec95/extract-utilities/0.log" Dec 05 16:09:32 crc kubenswrapper[4840]: I1205 16:09:32.040472 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-7mwf7_d8153f52-1255-4593-8136-083c0a618d49/registry-server/0.log" Dec 05 16:09:32 crc kubenswrapper[4840]: I1205 16:09:32.174922 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nbj9s_916556b2-0a55-4c5a-8d5a-3e42bdeaec95/extract-content/0.log" Dec 05 16:09:32 crc kubenswrapper[4840]: I1205 16:09:32.183139 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nbj9s_916556b2-0a55-4c5a-8d5a-3e42bdeaec95/extract-utilities/0.log" Dec 05 16:09:32 crc kubenswrapper[4840]: I1205 16:09:32.456441 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-scqgw_6da7f018-2512-4ff2-8824-f90c5a0d5abf/extract-utilities/0.log" Dec 05 16:09:32 crc kubenswrapper[4840]: I1205 16:09:32.464773 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-mv2cn_1581af24-4229-4cac-a548-20cafe277dff/marketplace-operator/0.log" Dec 05 16:09:32 crc kubenswrapper[4840]: I1205 16:09:32.532786 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nbj9s_916556b2-0a55-4c5a-8d5a-3e42bdeaec95/registry-server/0.log" Dec 05 16:09:32 crc kubenswrapper[4840]: I1205 16:09:32.705761 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-scqgw_6da7f018-2512-4ff2-8824-f90c5a0d5abf/extract-utilities/0.log" Dec 05 16:09:32 crc kubenswrapper[4840]: I1205 16:09:32.727044 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-scqgw_6da7f018-2512-4ff2-8824-f90c5a0d5abf/extract-content/0.log" Dec 05 16:09:32 crc kubenswrapper[4840]: I1205 16:09:32.727044 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-scqgw_6da7f018-2512-4ff2-8824-f90c5a0d5abf/extract-content/0.log" Dec 05 16:09:32 crc kubenswrapper[4840]: I1205 16:09:32.936264 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-scqgw_6da7f018-2512-4ff2-8824-f90c5a0d5abf/extract-content/0.log" Dec 05 16:09:32 crc kubenswrapper[4840]: I1205 16:09:32.957611 4840 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-scqgw_6da7f018-2512-4ff2-8824-f90c5a0d5abf/extract-utilities/0.log" Dec 05 16:09:33 crc kubenswrapper[4840]: I1205 16:09:33.046496 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-scqgw_6da7f018-2512-4ff2-8824-f90c5a0d5abf/registry-server/0.log" Dec 05 16:09:33 crc kubenswrapper[4840]: I1205 16:09:33.117981 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-25nwq_7ef68ef3-0dac-4194-9d02-c0156a923d76/extract-utilities/0.log" Dec 05 16:09:33 crc kubenswrapper[4840]: I1205 16:09:33.281240 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-25nwq_7ef68ef3-0dac-4194-9d02-c0156a923d76/extract-content/0.log" Dec 05 16:09:33 crc kubenswrapper[4840]: I1205 16:09:33.294931 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-25nwq_7ef68ef3-0dac-4194-9d02-c0156a923d76/extract-utilities/0.log" Dec 05 16:09:33 crc kubenswrapper[4840]: I1205 16:09:33.318349 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-25nwq_7ef68ef3-0dac-4194-9d02-c0156a923d76/extract-content/0.log" Dec 05 16:09:33 crc kubenswrapper[4840]: I1205 16:09:33.467107 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-25nwq_7ef68ef3-0dac-4194-9d02-c0156a923d76/extract-utilities/0.log" Dec 05 16:09:33 crc kubenswrapper[4840]: I1205 16:09:33.467508 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-25nwq_7ef68ef3-0dac-4194-9d02-c0156a923d76/extract-content/0.log" Dec 05 16:09:34 crc kubenswrapper[4840]: I1205 16:09:34.281607 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-25nwq_7ef68ef3-0dac-4194-9d02-c0156a923d76/registry-server/0.log" Dec 05 16:09:49 crc kubenswrapper[4840]: I1205 16:09:49.472321 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 16:09:49 crc kubenswrapper[4840]: I1205 16:09:49.472964 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 16:10:08 crc kubenswrapper[4840]: E1205 16:10:08.200106 4840 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.195:57266->38.102.83.195:36337: write tcp 38.102.83.195:57266->38.102.83.195:36337: write: broken pipe Dec 05 16:10:19 crc kubenswrapper[4840]: I1205 16:10:19.472085 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 16:10:19 crc kubenswrapper[4840]: I1205 16:10:19.472631 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" 
podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 16:10:49 crc kubenswrapper[4840]: I1205 16:10:49.471796 4840 patch_prober.go:28] interesting pod/machine-config-daemon-xxvfs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 16:10:49 crc kubenswrapper[4840]: I1205 16:10:49.472389 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 16:10:49 crc kubenswrapper[4840]: I1205 16:10:49.472449 4840 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" Dec 05 16:10:49 crc kubenswrapper[4840]: I1205 16:10:49.473411 4840 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"51a59f3f90aefa5d4a8e87aadf3bb93ef161c12368cbe20bc6ad4322dcd88e18"} pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 16:10:49 crc kubenswrapper[4840]: I1205 16:10:49.473510 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerName="machine-config-daemon" containerID="cri-o://51a59f3f90aefa5d4a8e87aadf3bb93ef161c12368cbe20bc6ad4322dcd88e18" gracePeriod=600 Dec 05 16:10:49 crc kubenswrapper[4840]: E1205 16:10:49.594162 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 16:10:50 crc kubenswrapper[4840]: I1205 16:10:50.315846 4840 generic.go:334] "Generic (PLEG): container finished" podID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" containerID="51a59f3f90aefa5d4a8e87aadf3bb93ef161c12368cbe20bc6ad4322dcd88e18" exitCode=0 Dec 05 16:10:50 crc kubenswrapper[4840]: I1205 16:10:50.316078 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" event={"ID":"a972c8d4-fbab-487f-a2b7-782c3195d1ef","Type":"ContainerDied","Data":"51a59f3f90aefa5d4a8e87aadf3bb93ef161c12368cbe20bc6ad4322dcd88e18"} Dec 05 16:10:50 crc kubenswrapper[4840]: I1205 16:10:50.316272 4840 scope.go:117] "RemoveContainer" containerID="24301b3a21dcacee3bf216aee4db41bf2d104b03938ab88fe2b2936fc478b964" Dec 05 16:10:50 crc kubenswrapper[4840]: I1205 16:10:50.317063 4840 scope.go:117] "RemoveContainer" containerID="51a59f3f90aefa5d4a8e87aadf3bb93ef161c12368cbe20bc6ad4322dcd88e18" Dec 05 16:10:50 crc kubenswrapper[4840]: E1205 16:10:50.317432 4840 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 16:11:02 crc kubenswrapper[4840]: I1205 16:11:02.073066 4840 scope.go:117] "RemoveContainer" containerID="51a59f3f90aefa5d4a8e87aadf3bb93ef161c12368cbe20bc6ad4322dcd88e18" Dec 05 16:11:02 crc kubenswrapper[4840]: E1205 16:11:02.074284 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 16:11:13 crc kubenswrapper[4840]: I1205 16:11:13.067436 4840 scope.go:117] "RemoveContainer" containerID="51a59f3f90aefa5d4a8e87aadf3bb93ef161c12368cbe20bc6ad4322dcd88e18" Dec 05 16:11:13 crc kubenswrapper[4840]: E1205 16:11:13.069315 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 16:11:18 crc kubenswrapper[4840]: I1205 16:11:18.636092 4840 generic.go:334] "Generic (PLEG): container finished" podID="54f86601-22c7-465e-8408-510506f3e79e" containerID="ca4366974bcf4526022cbbbac553a21b0c56e3d81a221c0514f8dfbfe1c09234" exitCode=0 Dec 05 16:11:18 crc kubenswrapper[4840]: I1205 16:11:18.636159 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dpph9/must-gather-mdm2k" event={"ID":"54f86601-22c7-465e-8408-510506f3e79e","Type":"ContainerDied","Data":"ca4366974bcf4526022cbbbac553a21b0c56e3d81a221c0514f8dfbfe1c09234"} Dec 05 16:11:18 crc kubenswrapper[4840]: I1205 16:11:18.637460 4840 scope.go:117] "RemoveContainer" containerID="ca4366974bcf4526022cbbbac553a21b0c56e3d81a221c0514f8dfbfe1c09234" Dec 05 16:11:19 crc kubenswrapper[4840]: I1205 16:11:19.537661 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-dpph9_must-gather-mdm2k_54f86601-22c7-465e-8408-510506f3e79e/gather/0.log" Dec 05 16:11:23 crc kubenswrapper[4840]: I1205 16:11:23.044109 4840 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wkwq8"] Dec 05 16:11:23 crc kubenswrapper[4840]: E1205 16:11:23.048388 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd140a3e-3b46-40cc-a885-9c63346f92e9" containerName="extract-content" Dec 05 16:11:23 crc kubenswrapper[4840]: I1205 16:11:23.048416 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd140a3e-3b46-40cc-a885-9c63346f92e9" containerName="extract-content" Dec 05 16:11:23 crc kubenswrapper[4840]: E1205 16:11:23.048432 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd140a3e-3b46-40cc-a885-9c63346f92e9" containerName="registry-server" Dec 05 16:11:23 crc kubenswrapper[4840]: I1205 16:11:23.048438 4840 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="fd140a3e-3b46-40cc-a885-9c63346f92e9" containerName="registry-server" Dec 05 16:11:23 crc kubenswrapper[4840]: E1205 16:11:23.048475 4840 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd140a3e-3b46-40cc-a885-9c63346f92e9" containerName="extract-utilities" Dec 05 16:11:23 crc kubenswrapper[4840]: I1205 16:11:23.048482 4840 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd140a3e-3b46-40cc-a885-9c63346f92e9" containerName="extract-utilities" Dec 05 16:11:23 crc kubenswrapper[4840]: I1205 16:11:23.048695 4840 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd140a3e-3b46-40cc-a885-9c63346f92e9" containerName="registry-server" Dec 05 16:11:23 crc kubenswrapper[4840]: I1205 16:11:23.050181 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wkwq8" Dec 05 16:11:23 crc kubenswrapper[4840]: I1205 16:11:23.059148 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wkwq8"] Dec 05 16:11:23 crc kubenswrapper[4840]: I1205 16:11:23.149737 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f359863e-d392-48c7-a97a-b6608cc695f2-catalog-content\") pod \"certified-operators-wkwq8\" (UID: \"f359863e-d392-48c7-a97a-b6608cc695f2\") " pod="openshift-marketplace/certified-operators-wkwq8" Dec 05 16:11:23 crc kubenswrapper[4840]: I1205 16:11:23.150082 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f359863e-d392-48c7-a97a-b6608cc695f2-utilities\") pod \"certified-operators-wkwq8\" (UID: \"f359863e-d392-48c7-a97a-b6608cc695f2\") " pod="openshift-marketplace/certified-operators-wkwq8" Dec 05 16:11:23 crc kubenswrapper[4840]: I1205 16:11:23.150334 4840 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drv5l\" (UniqueName: \"kubernetes.io/projected/f359863e-d392-48c7-a97a-b6608cc695f2-kube-api-access-drv5l\") pod \"certified-operators-wkwq8\" (UID: \"f359863e-d392-48c7-a97a-b6608cc695f2\") " pod="openshift-marketplace/certified-operators-wkwq8" Dec 05 16:11:23 crc kubenswrapper[4840]: I1205 16:11:23.252749 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drv5l\" (UniqueName: \"kubernetes.io/projected/f359863e-d392-48c7-a97a-b6608cc695f2-kube-api-access-drv5l\") pod \"certified-operators-wkwq8\" (UID: \"f359863e-d392-48c7-a97a-b6608cc695f2\") " pod="openshift-marketplace/certified-operators-wkwq8" Dec 05 16:11:23 crc kubenswrapper[4840]: I1205 16:11:23.252899 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f359863e-d392-48c7-a97a-b6608cc695f2-catalog-content\") pod \"certified-operators-wkwq8\" (UID: \"f359863e-d392-48c7-a97a-b6608cc695f2\") " pod="openshift-marketplace/certified-operators-wkwq8" Dec 05 16:11:23 crc kubenswrapper[4840]: I1205 16:11:23.252966 4840 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f359863e-d392-48c7-a97a-b6608cc695f2-utilities\") pod \"certified-operators-wkwq8\" (UID: \"f359863e-d392-48c7-a97a-b6608cc695f2\") " pod="openshift-marketplace/certified-operators-wkwq8" Dec 05 16:11:23 crc 
kubenswrapper[4840]: I1205 16:11:23.253493 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f359863e-d392-48c7-a97a-b6608cc695f2-utilities\") pod \"certified-operators-wkwq8\" (UID: \"f359863e-d392-48c7-a97a-b6608cc695f2\") " pod="openshift-marketplace/certified-operators-wkwq8" Dec 05 16:11:23 crc kubenswrapper[4840]: I1205 16:11:23.253570 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f359863e-d392-48c7-a97a-b6608cc695f2-catalog-content\") pod \"certified-operators-wkwq8\" (UID: \"f359863e-d392-48c7-a97a-b6608cc695f2\") " pod="openshift-marketplace/certified-operators-wkwq8" Dec 05 16:11:23 crc kubenswrapper[4840]: I1205 16:11:23.271574 4840 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drv5l\" (UniqueName: \"kubernetes.io/projected/f359863e-d392-48c7-a97a-b6608cc695f2-kube-api-access-drv5l\") pod \"certified-operators-wkwq8\" (UID: \"f359863e-d392-48c7-a97a-b6608cc695f2\") " pod="openshift-marketplace/certified-operators-wkwq8" Dec 05 16:11:23 crc kubenswrapper[4840]: I1205 16:11:23.376286 4840 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wkwq8" Dec 05 16:11:23 crc kubenswrapper[4840]: I1205 16:11:23.985525 4840 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wkwq8"] Dec 05 16:11:24 crc kubenswrapper[4840]: I1205 16:11:24.066940 4840 scope.go:117] "RemoveContainer" containerID="51a59f3f90aefa5d4a8e87aadf3bb93ef161c12368cbe20bc6ad4322dcd88e18" Dec 05 16:11:24 crc kubenswrapper[4840]: E1205 16:11:24.067339 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 16:11:24 crc kubenswrapper[4840]: I1205 16:11:24.717575 4840 generic.go:334] "Generic (PLEG): container finished" podID="f359863e-d392-48c7-a97a-b6608cc695f2" containerID="52afbb368f8542749d4d5d286560c25c1de764e986698e4b8005db8ef222fd98" exitCode=0 Dec 05 16:11:24 crc kubenswrapper[4840]: I1205 16:11:24.717685 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wkwq8" event={"ID":"f359863e-d392-48c7-a97a-b6608cc695f2","Type":"ContainerDied","Data":"52afbb368f8542749d4d5d286560c25c1de764e986698e4b8005db8ef222fd98"} Dec 05 16:11:24 crc kubenswrapper[4840]: I1205 16:11:24.717884 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wkwq8" event={"ID":"f359863e-d392-48c7-a97a-b6608cc695f2","Type":"ContainerStarted","Data":"be1011eef7aeaff330fd6bcdcd5967b8b78cf0209e0d66a53a0aea5b7ecdbb4f"} Dec 05 16:11:24 crc kubenswrapper[4840]: I1205 16:11:24.721529 4840 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 16:11:25 crc kubenswrapper[4840]: I1205 16:11:25.727569 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wkwq8" 
event={"ID":"f359863e-d392-48c7-a97a-b6608cc695f2","Type":"ContainerStarted","Data":"ff6f39c014a5c5691118f434b417901a10bf0ce18fc4b5a787e745763e9f5d0c"} Dec 05 16:11:26 crc kubenswrapper[4840]: I1205 16:11:26.739505 4840 generic.go:334] "Generic (PLEG): container finished" podID="f359863e-d392-48c7-a97a-b6608cc695f2" containerID="ff6f39c014a5c5691118f434b417901a10bf0ce18fc4b5a787e745763e9f5d0c" exitCode=0 Dec 05 16:11:26 crc kubenswrapper[4840]: I1205 16:11:26.739680 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wkwq8" event={"ID":"f359863e-d392-48c7-a97a-b6608cc695f2","Type":"ContainerDied","Data":"ff6f39c014a5c5691118f434b417901a10bf0ce18fc4b5a787e745763e9f5d0c"} Dec 05 16:11:27 crc kubenswrapper[4840]: I1205 16:11:27.750240 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wkwq8" event={"ID":"f359863e-d392-48c7-a97a-b6608cc695f2","Type":"ContainerStarted","Data":"2114c92227a5e6aa0f6387504e961a7e5ca0b0be702a18a823130d924df45522"} Dec 05 16:11:27 crc kubenswrapper[4840]: I1205 16:11:27.775531 4840 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wkwq8" podStartSLOduration=2.125986999 podStartE2EDuration="4.775514715s" podCreationTimestamp="2025-12-05 16:11:23 +0000 UTC" firstStartedPulling="2025-12-05 16:11:24.721171275 +0000 UTC m=+4363.062233889" lastFinishedPulling="2025-12-05 16:11:27.370698991 +0000 UTC m=+4365.711761605" observedRunningTime="2025-12-05 16:11:27.775039681 +0000 UTC m=+4366.116102335" watchObservedRunningTime="2025-12-05 16:11:27.775514715 +0000 UTC m=+4366.116577329" Dec 05 16:11:31 crc kubenswrapper[4840]: I1205 16:11:31.581919 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-dpph9/must-gather-mdm2k"] Dec 05 16:11:31 crc kubenswrapper[4840]: I1205 16:11:31.582828 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-dpph9/must-gather-mdm2k" podUID="54f86601-22c7-465e-8408-510506f3e79e" containerName="copy" containerID="cri-o://304b8ddfa57557777cfbca4aedb1da2c31997ca475528c6e93ae70d699b7ad72" gracePeriod=2 Dec 05 16:11:31 crc kubenswrapper[4840]: I1205 16:11:31.594646 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-dpph9/must-gather-mdm2k"] Dec 05 16:11:32 crc kubenswrapper[4840]: I1205 16:11:32.475305 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-dpph9_must-gather-mdm2k_54f86601-22c7-465e-8408-510506f3e79e/copy/0.log" Dec 05 16:11:32 crc kubenswrapper[4840]: I1205 16:11:32.476124 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-dpph9/must-gather-mdm2k" Dec 05 16:11:32 crc kubenswrapper[4840]: I1205 16:11:32.549287 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pvwnw\" (UniqueName: \"kubernetes.io/projected/54f86601-22c7-465e-8408-510506f3e79e-kube-api-access-pvwnw\") pod \"54f86601-22c7-465e-8408-510506f3e79e\" (UID: \"54f86601-22c7-465e-8408-510506f3e79e\") " Dec 05 16:11:32 crc kubenswrapper[4840]: I1205 16:11:32.549389 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/54f86601-22c7-465e-8408-510506f3e79e-must-gather-output\") pod \"54f86601-22c7-465e-8408-510506f3e79e\" (UID: \"54f86601-22c7-465e-8408-510506f3e79e\") " Dec 05 16:11:32 crc kubenswrapper[4840]: I1205 16:11:32.557148 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54f86601-22c7-465e-8408-510506f3e79e-kube-api-access-pvwnw" (OuterVolumeSpecName: "kube-api-access-pvwnw") pod "54f86601-22c7-465e-8408-510506f3e79e" (UID: "54f86601-22c7-465e-8408-510506f3e79e"). InnerVolumeSpecName "kube-api-access-pvwnw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 16:11:32 crc kubenswrapper[4840]: I1205 16:11:32.651665 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pvwnw\" (UniqueName: \"kubernetes.io/projected/54f86601-22c7-465e-8408-510506f3e79e-kube-api-access-pvwnw\") on node \"crc\" DevicePath \"\"" Dec 05 16:11:32 crc kubenswrapper[4840]: I1205 16:11:32.698075 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54f86601-22c7-465e-8408-510506f3e79e-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "54f86601-22c7-465e-8408-510506f3e79e" (UID: "54f86601-22c7-465e-8408-510506f3e79e"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 16:11:32 crc kubenswrapper[4840]: I1205 16:11:32.753470 4840 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/54f86601-22c7-465e-8408-510506f3e79e-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 05 16:11:32 crc kubenswrapper[4840]: I1205 16:11:32.802125 4840 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-dpph9_must-gather-mdm2k_54f86601-22c7-465e-8408-510506f3e79e/copy/0.log" Dec 05 16:11:32 crc kubenswrapper[4840]: I1205 16:11:32.802608 4840 generic.go:334] "Generic (PLEG): container finished" podID="54f86601-22c7-465e-8408-510506f3e79e" containerID="304b8ddfa57557777cfbca4aedb1da2c31997ca475528c6e93ae70d699b7ad72" exitCode=143 Dec 05 16:11:32 crc kubenswrapper[4840]: I1205 16:11:32.802685 4840 scope.go:117] "RemoveContainer" containerID="304b8ddfa57557777cfbca4aedb1da2c31997ca475528c6e93ae70d699b7ad72" Dec 05 16:11:32 crc kubenswrapper[4840]: I1205 16:11:32.802736 4840 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-dpph9/must-gather-mdm2k"
Dec 05 16:11:32 crc kubenswrapper[4840]: I1205 16:11:32.825374 4840 scope.go:117] "RemoveContainer" containerID="ca4366974bcf4526022cbbbac553a21b0c56e3d81a221c0514f8dfbfe1c09234"
Dec 05 16:11:32 crc kubenswrapper[4840]: I1205 16:11:32.892356 4840 scope.go:117] "RemoveContainer" containerID="304b8ddfa57557777cfbca4aedb1da2c31997ca475528c6e93ae70d699b7ad72"
Dec 05 16:11:32 crc kubenswrapper[4840]: E1205 16:11:32.893225 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"304b8ddfa57557777cfbca4aedb1da2c31997ca475528c6e93ae70d699b7ad72\": container with ID starting with 304b8ddfa57557777cfbca4aedb1da2c31997ca475528c6e93ae70d699b7ad72 not found: ID does not exist" containerID="304b8ddfa57557777cfbca4aedb1da2c31997ca475528c6e93ae70d699b7ad72"
Dec 05 16:11:32 crc kubenswrapper[4840]: I1205 16:11:32.893256 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"304b8ddfa57557777cfbca4aedb1da2c31997ca475528c6e93ae70d699b7ad72"} err="failed to get container status \"304b8ddfa57557777cfbca4aedb1da2c31997ca475528c6e93ae70d699b7ad72\": rpc error: code = NotFound desc = could not find container \"304b8ddfa57557777cfbca4aedb1da2c31997ca475528c6e93ae70d699b7ad72\": container with ID starting with 304b8ddfa57557777cfbca4aedb1da2c31997ca475528c6e93ae70d699b7ad72 not found: ID does not exist"
Dec 05 16:11:32 crc kubenswrapper[4840]: I1205 16:11:32.893280 4840 scope.go:117] "RemoveContainer" containerID="ca4366974bcf4526022cbbbac553a21b0c56e3d81a221c0514f8dfbfe1c09234"
Dec 05 16:11:32 crc kubenswrapper[4840]: E1205 16:11:32.893594 4840 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca4366974bcf4526022cbbbac553a21b0c56e3d81a221c0514f8dfbfe1c09234\": container with ID starting with ca4366974bcf4526022cbbbac553a21b0c56e3d81a221c0514f8dfbfe1c09234 not found: ID does not exist" containerID="ca4366974bcf4526022cbbbac553a21b0c56e3d81a221c0514f8dfbfe1c09234"
Dec 05 16:11:32 crc kubenswrapper[4840]: I1205 16:11:32.893616 4840 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca4366974bcf4526022cbbbac553a21b0c56e3d81a221c0514f8dfbfe1c09234"} err="failed to get container status \"ca4366974bcf4526022cbbbac553a21b0c56e3d81a221c0514f8dfbfe1c09234\": rpc error: code = NotFound desc = could not find container \"ca4366974bcf4526022cbbbac553a21b0c56e3d81a221c0514f8dfbfe1c09234\": container with ID starting with ca4366974bcf4526022cbbbac553a21b0c56e3d81a221c0514f8dfbfe1c09234 not found: ID does not exist"
Dec 05 16:11:33 crc kubenswrapper[4840]: I1205 16:11:33.377457 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wkwq8"
Dec 05 16:11:33 crc kubenswrapper[4840]: I1205 16:11:33.377857 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wkwq8"
Dec 05 16:11:33 crc kubenswrapper[4840]: I1205 16:11:33.456851 4840 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wkwq8"
Dec 05 16:11:33 crc kubenswrapper[4840]: I1205 16:11:33.857322 4840 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wkwq8"
Dec 05 16:11:33 crc kubenswrapper[4840]: I1205 16:11:33.911243 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wkwq8"]
Dec 05 16:11:34 crc kubenswrapper[4840]: I1205 16:11:34.080261 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54f86601-22c7-465e-8408-510506f3e79e" path="/var/lib/kubelet/pods/54f86601-22c7-465e-8408-510506f3e79e/volumes"
Dec 05 16:11:35 crc kubenswrapper[4840]: I1205 16:11:35.834196 4840 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wkwq8" podUID="f359863e-d392-48c7-a97a-b6608cc695f2" containerName="registry-server" containerID="cri-o://2114c92227a5e6aa0f6387504e961a7e5ca0b0be702a18a823130d924df45522" gracePeriod=2
Dec 05 16:11:36 crc kubenswrapper[4840]: I1205 16:11:36.846028 4840 generic.go:334] "Generic (PLEG): container finished" podID="f359863e-d392-48c7-a97a-b6608cc695f2" containerID="2114c92227a5e6aa0f6387504e961a7e5ca0b0be702a18a823130d924df45522" exitCode=0
Dec 05 16:11:36 crc kubenswrapper[4840]: I1205 16:11:36.846100 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wkwq8" event={"ID":"f359863e-d392-48c7-a97a-b6608cc695f2","Type":"ContainerDied","Data":"2114c92227a5e6aa0f6387504e961a7e5ca0b0be702a18a823130d924df45522"}
Dec 05 16:11:36 crc kubenswrapper[4840]: I1205 16:11:36.846424 4840 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wkwq8" event={"ID":"f359863e-d392-48c7-a97a-b6608cc695f2","Type":"ContainerDied","Data":"be1011eef7aeaff330fd6bcdcd5967b8b78cf0209e0d66a53a0aea5b7ecdbb4f"}
Dec 05 16:11:36 crc kubenswrapper[4840]: I1205 16:11:36.846441 4840 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="be1011eef7aeaff330fd6bcdcd5967b8b78cf0209e0d66a53a0aea5b7ecdbb4f"
Dec 05 16:11:36 crc kubenswrapper[4840]: I1205 16:11:36.908810 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wkwq8"
Dec 05 16:11:36 crc kubenswrapper[4840]: I1205 16:11:36.939768 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-drv5l\" (UniqueName: \"kubernetes.io/projected/f359863e-d392-48c7-a97a-b6608cc695f2-kube-api-access-drv5l\") pod \"f359863e-d392-48c7-a97a-b6608cc695f2\" (UID: \"f359863e-d392-48c7-a97a-b6608cc695f2\") "
Dec 05 16:11:36 crc kubenswrapper[4840]: I1205 16:11:36.939952 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f359863e-d392-48c7-a97a-b6608cc695f2-catalog-content\") pod \"f359863e-d392-48c7-a97a-b6608cc695f2\" (UID: \"f359863e-d392-48c7-a97a-b6608cc695f2\") "
Dec 05 16:11:36 crc kubenswrapper[4840]: I1205 16:11:36.940012 4840 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f359863e-d392-48c7-a97a-b6608cc695f2-utilities\") pod \"f359863e-d392-48c7-a97a-b6608cc695f2\" (UID: \"f359863e-d392-48c7-a97a-b6608cc695f2\") "
Dec 05 16:11:36 crc kubenswrapper[4840]: I1205 16:11:36.940820 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f359863e-d392-48c7-a97a-b6608cc695f2-utilities" (OuterVolumeSpecName: "utilities") pod "f359863e-d392-48c7-a97a-b6608cc695f2" (UID: "f359863e-d392-48c7-a97a-b6608cc695f2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 16:11:36 crc kubenswrapper[4840]: I1205 16:11:36.947097 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f359863e-d392-48c7-a97a-b6608cc695f2-kube-api-access-drv5l" (OuterVolumeSpecName: "kube-api-access-drv5l") pod "f359863e-d392-48c7-a97a-b6608cc695f2" (UID: "f359863e-d392-48c7-a97a-b6608cc695f2"). InnerVolumeSpecName "kube-api-access-drv5l". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 16:11:37 crc kubenswrapper[4840]: I1205 16:11:37.000091 4840 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f359863e-d392-48c7-a97a-b6608cc695f2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f359863e-d392-48c7-a97a-b6608cc695f2" (UID: "f359863e-d392-48c7-a97a-b6608cc695f2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 16:11:37 crc kubenswrapper[4840]: I1205 16:11:37.042450 4840 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f359863e-d392-48c7-a97a-b6608cc695f2-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 16:11:37 crc kubenswrapper[4840]: I1205 16:11:37.042484 4840 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f359863e-d392-48c7-a97a-b6608cc695f2-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 16:11:37 crc kubenswrapper[4840]: I1205 16:11:37.042495 4840 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-drv5l\" (UniqueName: \"kubernetes.io/projected/f359863e-d392-48c7-a97a-b6608cc695f2-kube-api-access-drv5l\") on node \"crc\" DevicePath \"\""
Dec 05 16:11:37 crc kubenswrapper[4840]: I1205 16:11:37.068554 4840 scope.go:117] "RemoveContainer" containerID="51a59f3f90aefa5d4a8e87aadf3bb93ef161c12368cbe20bc6ad4322dcd88e18"
Dec 05 16:11:37 crc kubenswrapper[4840]: E1205 16:11:37.068796 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 16:11:37 crc kubenswrapper[4840]: I1205 16:11:37.857265 4840 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wkwq8"
Dec 05 16:11:37 crc kubenswrapper[4840]: I1205 16:11:37.891572 4840 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wkwq8"]
Dec 05 16:11:37 crc kubenswrapper[4840]: I1205 16:11:37.902077 4840 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wkwq8"]
Dec 05 16:11:38 crc kubenswrapper[4840]: I1205 16:11:38.077805 4840 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f359863e-d392-48c7-a97a-b6608cc695f2" path="/var/lib/kubelet/pods/f359863e-d392-48c7-a97a-b6608cc695f2/volumes"
Dec 05 16:11:51 crc kubenswrapper[4840]: I1205 16:11:51.066856 4840 scope.go:117] "RemoveContainer" containerID="51a59f3f90aefa5d4a8e87aadf3bb93ef161c12368cbe20bc6ad4322dcd88e18"
Dec 05 16:11:51 crc kubenswrapper[4840]: E1205 16:11:51.067707 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 16:12:04 crc kubenswrapper[4840]: I1205 16:12:04.067728 4840 scope.go:117] "RemoveContainer" containerID="51a59f3f90aefa5d4a8e87aadf3bb93ef161c12368cbe20bc6ad4322dcd88e18"
Dec 05 16:12:04 crc kubenswrapper[4840]: E1205 16:12:04.069024 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 16:12:17 crc kubenswrapper[4840]: I1205 16:12:17.068038 4840 scope.go:117] "RemoveContainer" containerID="51a59f3f90aefa5d4a8e87aadf3bb93ef161c12368cbe20bc6ad4322dcd88e18"
Dec 05 16:12:17 crc kubenswrapper[4840]: E1205 16:12:17.069504 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 16:12:25 crc kubenswrapper[4840]: I1205 16:12:25.784352 4840 scope.go:117] "RemoveContainer" containerID="783f1812fcc1a72277a9cee9d9f94b6b2496b0cd4e4bbe2a9633be170e4ca4e1"
Dec 05 16:12:25 crc kubenswrapper[4840]: I1205 16:12:25.820677 4840 scope.go:117] "RemoveContainer" containerID="b3c38d035d9d90210a2971ab0b28b58bd1a533e193a0ae21378f3297a3ae7f46"
Dec 05 16:12:25 crc kubenswrapper[4840]: I1205 16:12:25.859333 4840 scope.go:117] "RemoveContainer" containerID="ec6ac6f1548a1f246e8d430aaff27fce09a7983b5ce5d161fed4e900f4343164"
Dec 05 16:12:32 crc kubenswrapper[4840]: I1205 16:12:32.075350 4840 scope.go:117] "RemoveContainer" containerID="51a59f3f90aefa5d4a8e87aadf3bb93ef161c12368cbe20bc6ad4322dcd88e18"
Dec 05 16:12:32 crc kubenswrapper[4840]: E1205 16:12:32.076046 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 16:12:45 crc kubenswrapper[4840]: I1205 16:12:45.145425 4840 scope.go:117] "RemoveContainer" containerID="51a59f3f90aefa5d4a8e87aadf3bb93ef161c12368cbe20bc6ad4322dcd88e18"
Dec 05 16:12:45 crc kubenswrapper[4840]: E1205 16:12:45.155683 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 16:12:59 crc kubenswrapper[4840]: I1205 16:12:59.067084 4840 scope.go:117] "RemoveContainer" containerID="51a59f3f90aefa5d4a8e87aadf3bb93ef161c12368cbe20bc6ad4322dcd88e18"
Dec 05 16:12:59 crc kubenswrapper[4840]: E1205 16:12:59.067896 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 16:13:11 crc kubenswrapper[4840]: I1205 16:13:11.066970 4840 scope.go:117] "RemoveContainer" containerID="51a59f3f90aefa5d4a8e87aadf3bb93ef161c12368cbe20bc6ad4322dcd88e18"
Dec 05 16:13:11 crc kubenswrapper[4840]: E1205 16:13:11.067749 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 16:13:25 crc kubenswrapper[4840]: I1205 16:13:25.067182 4840 scope.go:117] "RemoveContainer" containerID="51a59f3f90aefa5d4a8e87aadf3bb93ef161c12368cbe20bc6ad4322dcd88e18"
Dec 05 16:13:25 crc kubenswrapper[4840]: E1205 16:13:25.068199 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 16:13:40 crc kubenswrapper[4840]: I1205 16:13:40.067205 4840 scope.go:117] "RemoveContainer" containerID="51a59f3f90aefa5d4a8e87aadf3bb93ef161c12368cbe20bc6ad4322dcd88e18"
Dec 05 16:13:40 crc kubenswrapper[4840]: E1205 16:13:40.068133 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 16:13:46 crc kubenswrapper[4840]: I1205 16:13:46.473594 4840 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-pczb2 container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 05 16:13:46 crc kubenswrapper[4840]: I1205 16:13:46.474472 4840 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pczb2" podUID="237e4a75-4edd-4622-87f4-03a1f620649d" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.12:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Dec 05 16:13:46 crc kubenswrapper[4840]: I1205 16:13:46.475270 4840 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-pczb2 container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.12:8443/healthz\": context deadline exceeded" start-of-body=
Dec 05 16:13:46 crc kubenswrapper[4840]: I1205 16:13:46.475354 4840 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-pczb2" podUID="237e4a75-4edd-4622-87f4-03a1f620649d" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.12:8443/healthz\": context deadline exceeded"
Dec 05 16:13:51 crc kubenswrapper[4840]: I1205 16:13:51.066898 4840 scope.go:117] "RemoveContainer" containerID="51a59f3f90aefa5d4a8e87aadf3bb93ef161c12368cbe20bc6ad4322dcd88e18"
Dec 05 16:13:51 crc kubenswrapper[4840]: E1205 16:13:51.067969 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 16:14:02 crc kubenswrapper[4840]: I1205 16:14:02.093402 4840 scope.go:117] "RemoveContainer" containerID="51a59f3f90aefa5d4a8e87aadf3bb93ef161c12368cbe20bc6ad4322dcd88e18"
Dec 05 16:14:02 crc kubenswrapper[4840]: E1205 16:14:02.095515 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
Dec 05 16:14:16 crc kubenswrapper[4840]: I1205 16:14:16.066667 4840 scope.go:117] "RemoveContainer" containerID="51a59f3f90aefa5d4a8e87aadf3bb93ef161c12368cbe20bc6ad4322dcd88e18"
Dec 05 16:14:16 crc kubenswrapper[4840]: E1205 16:14:16.067399 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef"
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 16:14:30 crc kubenswrapper[4840]: I1205 16:14:30.067529 4840 scope.go:117] "RemoveContainer" containerID="51a59f3f90aefa5d4a8e87aadf3bb93ef161c12368cbe20bc6ad4322dcd88e18" Dec 05 16:14:30 crc kubenswrapper[4840]: E1205 16:14:30.069764 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" Dec 05 16:14:45 crc kubenswrapper[4840]: I1205 16:14:45.066714 4840 scope.go:117] "RemoveContainer" containerID="51a59f3f90aefa5d4a8e87aadf3bb93ef161c12368cbe20bc6ad4322dcd88e18" Dec 05 16:14:45 crc kubenswrapper[4840]: E1205 16:14:45.067385 4840 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-xxvfs_openshift-machine-config-operator(a972c8d4-fbab-487f-a2b7-782c3195d1ef)\"" pod="openshift-machine-config-operator/machine-config-daemon-xxvfs" podUID="a972c8d4-fbab-487f-a2b7-782c3195d1ef" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515114602403024441 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015114602404017357 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015114571141016505 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015114571142015456 5ustar corecore